code (string, lengths 13-6.09M) | order_type (string, 2 classes) | original_example (dict) | step_ids (list, lengths 1-5) |
---|---|---|---|
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('cms', '0020_old_tree_cleanup'), ('styleguide',
'0002_flexcontainer')]
operations = [migrations.CreateModel(name='ContentSection', fields=[(
'cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=
django.db.models.deletion.CASCADE, parent_link=True, primary_key=
True, related_name='styleguide_contentsection', serialize=False, to
='cms.CMSPlugin')), ('background_color', models.CharField(choices=[
('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')],
default='white', max_length=20))], options={'abstract': False},
bases=('cms.cmsplugin',)), migrations.AlterField(model_name=
'flexcontainer', name='spacing', field=models.CharField(choices=[(
'flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center',
'center'), ('space-between', 'space-between'), ('space-around',
'space-around')], default='flex-start', max_length=13))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [('cms', '0020_old_tree_cleanup'), ('styleguide',
'0002_flexcontainer')]
operations = [migrations.CreateModel(name='ContentSection', fields=[(
'cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=
django.db.models.deletion.CASCADE, parent_link=True, primary_key=
True, related_name='styleguide_contentsection', serialize=False, to
='cms.CMSPlugin')), ('background_color', models.CharField(choices=[
('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')],
default='white', max_length=20))], options={'abstract': False},
bases=('cms.cmsplugin',)), migrations.AlterField(model_name=
'flexcontainer', name='spacing', field=models.CharField(choices=[(
'flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center',
'center'), ('space-between', 'space-between'), ('space-around',
'space-around')], default='flex-start', max_length=13))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-27 21:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cms', '0020_old_tree_cleanup'),
('styleguide', '0002_flexcontainer'),
]
operations = [
migrations.CreateModel(
name='ContentSection',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='styleguide_contentsection', serialize=False, to='cms.CMSPlugin')),
('background_color', models.CharField(choices=[('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')], default='white', max_length=20)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.AlterField(
model_name='flexcontainer',
name='spacing',
field=models.CharField(choices=[('flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center', 'center'), ('space-between', 'space-between'), ('space-around', 'space-around')], default='flex-start', max_length=13),
),
]
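For reference, a CreateModel/AlterField migration of this shape is what makemigrations would emit for a django-cms plugin model. A minimal sketch of what the ContentSection plugin model behind this migration might look like (field names and choices are read off the migration above; the module layout and imports are assumptions for illustration):
# Hypothetical models.py sketch for the ContentSection plugin above.
from django.db import models
from cms.models import CMSPlugin

class ContentSection(CMSPlugin):
    background_color = models.CharField(
        max_length=20,
        default='white',
        choices=[('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')],
    )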
|
flexible
|
{
"blob_id": "85c2a4163a3132794186b95b4068f6c6e1104828",
"index": 1306,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('cms', '0020_old_tree_cleanup'), ('styleguide',\n '0002_flexcontainer')]\n operations = [migrations.CreateModel(name='ContentSection', fields=[(\n 'cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=\n django.db.models.deletion.CASCADE, parent_link=True, primary_key=\n True, related_name='styleguide_contentsection', serialize=False, to\n ='cms.CMSPlugin')), ('background_color', models.CharField(choices=[\n ('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')],\n default='white', max_length=20))], options={'abstract': False},\n bases=('cms.cmsplugin',)), migrations.AlterField(model_name=\n 'flexcontainer', name='spacing', field=models.CharField(choices=[(\n 'flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center',\n 'center'), ('space-between', 'space-between'), ('space-around',\n 'space-around')], default='flex-start', max_length=13))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('cms', '0020_old_tree_cleanup'), ('styleguide',\n '0002_flexcontainer')]\n operations = [migrations.CreateModel(name='ContentSection', fields=[(\n 'cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=\n django.db.models.deletion.CASCADE, parent_link=True, primary_key=\n True, related_name='styleguide_contentsection', serialize=False, to\n ='cms.CMSPlugin')), ('background_color', models.CharField(choices=[\n ('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')],\n default='white', max_length=20))], options={'abstract': False},\n bases=('cms.cmsplugin',)), migrations.AlterField(model_name=\n 'flexcontainer', name='spacing', field=models.CharField(choices=[(\n 'flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center',\n 'center'), ('space-between', 'space-between'), ('space-around',\n 'space-around')], default='flex-start', max_length=13))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.13 on 2018-06-27 21:49\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('cms', '0020_old_tree_cleanup'),\n ('styleguide', '0002_flexcontainer'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='ContentSection',\n fields=[\n ('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='styleguide_contentsection', serialize=False, to='cms.CMSPlugin')),\n ('background_color', models.CharField(choices=[('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')], default='white', max_length=20)),\n ],\n options={\n 'abstract': False,\n },\n bases=('cms.cmsplugin',),\n ),\n migrations.AlterField(\n model_name='flexcontainer',\n name='spacing',\n field=models.CharField(choices=[('flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center', 'center'), ('space-between', 'space-between'), ('space-around', 'space-around')], default='flex-start', max_length=13),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def get_similarity(word1, word2):
"""
Returns a similarity score between two words
"""
tok1 = cache.get(word1, nlp(word1))
tok2 = cache.get(word2, nlp(word2))
return tok1.similarity(tok2)
<|reserved_special_token_0|>
def get_closest_words(word, choices, n=1):
"""
Returns the n closest matches in the model vocab
Parameters:
word word to search
choices available matches
n number of results to return
Returns:
A list of n tuples in the form (word (str), similarity (float))
"""
app.logger.info(f'Finding closest words to "{word}"')
if word in choices:
return [(word, 1.0)]
if word in nlp.vocab.strings:
similarities = [(choice, get_similarity(word, choice)) for choice in
choices]
return sorted(similarities, key=lambda x: x[1])[::-1][:n]
else:
app.logger.info(f'Not in model vocab: "{word}"')
return [(get_random_word(), 1.0), (word, 0.0)]
def find_matching_quote(genre, top_n=5):
"""
Returns a matching quote and up to 5 of the most similar genres with similarity measures
    Parameters:
genre genre to match
Returns:
(str) Quote
(str) Author
    (list) List of tuples in the form (word (str), similarity (float))
"""
matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)
closest = matched_genres[0][0]
app.logger.info(f'Finding quote for: "{closest}"')
matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]
quote = matching_quote.QUOTE
author = matching_quote.AUTHOR
return quote, author, matched_genres
<|reserved_special_token_1|>
<|reserved_special_token_0|>
q['GENRE'].replace(to_replace=replace, inplace=True)
<|reserved_special_token_0|>
def get_similarity(word1, word2):
"""
Returns a similarity score between two words
"""
tok1 = cache.get(word1, nlp(word1))
tok2 = cache.get(word2, nlp(word2))
return tok1.similarity(tok2)
def get_random_word():
"""
Returns a random category label from the data
"""
random_word = q['GENRE'].sample(1).iloc[0]
return random_word
def get_closest_words(word, choices, n=1):
"""
Returns the n closest matches in the model vocab
Parameters:
word word to search
choices available matches
n number of results to return
Returns:
A list of n tuples in the form (word (str), similarity (float))
"""
app.logger.info(f'Finding closest words to "{word}"')
if word in choices:
return [(word, 1.0)]
if word in nlp.vocab.strings:
similarities = [(choice, get_similarity(word, choice)) for choice in
choices]
return sorted(similarities, key=lambda x: x[1])[::-1][:n]
else:
app.logger.info(f'Not in model vocab: "{word}"')
return [(get_random_word(), 1.0), (word, 0.0)]
def find_matching_quote(genre, top_n=5):
"""
Returns a matching quote and up to 5 of the most similar genres with similarity measures
    Parameters:
genre genre to match
Returns:
(str) Quote
(str) Author
    (list) List of tuples in the form (word (str), similarity (float))
"""
matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)
closest = matched_genres[0][0]
app.logger.info(f'Finding quote for: "{closest}"')
matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]
quote = matching_quote.QUOTE
author = matching_quote.AUTHOR
return quote, author, matched_genres
<|reserved_special_token_1|>
<|reserved_special_token_0|>
q = pd.read_csv(os.path.join('app', 'data', 'quotes_all.csv'), sep=';',
skiprows=1, header=0)
replace = {'movingon': 'moving', 'fathersday': 'fathers', 'memorialday':
'memorial', 'mothersday': 'mothers', 'newyears': 'year',
'saintpatricksday': 'ireland', 'valentinesday': 'valentine'}
q['GENRE'].replace(to_replace=replace, inplace=True)
<|reserved_special_token_0|>
nlp = spacy.load('en_core_web_md')
cache = {genre: nlp(genre) for genre in q.GENRE.unique()}
def get_similarity(word1, word2):
"""
Returns a similarity score between two words
"""
tok1 = cache.get(word1, nlp(word1))
tok2 = cache.get(word2, nlp(word2))
return tok1.similarity(tok2)
def get_random_word():
"""
Returns a random category label from the data
"""
random_word = q['GENRE'].sample(1).iloc[0]
return random_word
def get_closest_words(word, choices, n=1):
"""
Returns the n closest matches in the model vocab
Parameters:
word word to search
choices available matches
n number of results to return
Returns:
A list of n tuples in the form (word (str), similarity (float))
"""
app.logger.info(f'Finding closest words to "{word}"')
if word in choices:
return [(word, 1.0)]
if word in nlp.vocab.strings:
similarities = [(choice, get_similarity(word, choice)) for choice in
choices]
return sorted(similarities, key=lambda x: x[1])[::-1][:n]
else:
app.logger.info(f'Not in model vocab: "{word}"')
return [(get_random_word(), 1.0), (word, 0.0)]
def find_matching_quote(genre, top_n=5):
"""
Returns a matching quote and up to 5 of the most similar genres with similarity measures
    Parameters:
genre genre to match
Returns:
(str) Quote
(str) Author
    (list) List of tuples in the form (word (str), similarity (float))
"""
matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)
closest = matched_genres[0][0]
app.logger.info(f'Finding quote for: "{closest}"')
matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]
quote = matching_quote.QUOTE
author = matching_quote.AUTHOR
return quote, author, matched_genres
<|reserved_special_token_1|>
import os
from app import app
import pandas as pd
q = pd.read_csv(os.path.join('app', 'data', 'quotes_all.csv'), sep=';',
skiprows=1, header=0)
replace = {'movingon': 'moving', 'fathersday': 'fathers', 'memorialday':
'memorial', 'mothersday': 'mothers', 'newyears': 'year',
'saintpatricksday': 'ireland', 'valentinesday': 'valentine'}
q['GENRE'].replace(to_replace=replace, inplace=True)
import spacy
nlp = spacy.load('en_core_web_md')
cache = {genre: nlp(genre) for genre in q.GENRE.unique()}
def get_similarity(word1, word2):
"""
Returns a similarity score between two words
"""
tok1 = cache.get(word1, nlp(word1))
tok2 = cache.get(word2, nlp(word2))
return tok1.similarity(tok2)
def get_random_word():
"""
Returns a random category label from the data
"""
random_word = q['GENRE'].sample(1).iloc[0]
return random_word
def get_closest_words(word, choices, n=1):
"""
Returns the n closest matches in the model vocab
Parameters:
word word to search
choices available matches
n number of results to return
Returns:
A list of n tuples in the form (word (str), similarity (float))
"""
app.logger.info(f'Finding closest words to "{word}"')
if word in choices:
return [(word, 1.0)]
if word in nlp.vocab.strings:
similarities = [(choice, get_similarity(word, choice)) for choice in
choices]
return sorted(similarities, key=lambda x: x[1])[::-1][:n]
else:
app.logger.info(f'Not in model vocab: "{word}"')
return [(get_random_word(), 1.0), (word, 0.0)]
def find_matching_quote(genre, top_n=5):
"""
Returns a matching quote and up to 5 of the most similar genres with similarity measures
    Parameters:
genre genre to match
Returns:
(str) Quote
(str) Author
    (list) List of tuples in the form (word (str), similarity (float))
"""
matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)
closest = matched_genres[0][0]
app.logger.info(f'Finding quote for: "{closest}"')
matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]
quote = matching_quote.QUOTE
author = matching_quote.AUTHOR
return quote, author, matched_genres
<|reserved_special_token_1|>
## Author: Aleem Juma
import os
from app import app
import pandas as pd
# read in the quotes database
q = pd.read_csv(os.path.join('app','data','quotes_all.csv'), sep=';', skiprows=1, header=0)
# there are a few quote genres that don't occur in the model vocab
# replace them with appropriate words so the similarity search works
replace = {
'movingon':'moving',
'fathersday': 'fathers',
'memorialday': 'memorial',
'mothersday': 'mothers',
'newyears': 'year',
'saintpatricksday': 'ireland',
'valentinesday': 'valentine'
}
q['GENRE'].replace(to_replace=replace, inplace=True)
import spacy
nlp = spacy.load('en_core_web_md')
# cache the computed tokens for the genres in the dataset
cache = {genre:nlp(genre) for genre in q.GENRE.unique()}
def get_similarity(word1, word2):
'''
Returns a similarity score between two words
'''
tok1 = cache.get(word1, nlp(word1))
tok2 = cache.get(word2, nlp(word2))
return tok1.similarity(tok2)
def get_random_word():
'''
Returns a random category label from the data
'''
random_word = q['GENRE'].sample(1).iloc[0]
return random_word
def get_closest_words(word, choices, n=1):
'''
Returns the n closest matches in the model vocab
Parameters:
word word to search
choices available matches
n number of results to return
Returns:
A list of n tuples in the form (word (str), similarity (float))
'''
app.logger.info(f'Finding closest words to "{word}"')
if word in choices:
# if the word is already in the list return the same word with 100% match
return [(word, 1.0)]
if word in nlp.vocab.strings:
# if not in the list, find the closest words
similarities = [(choice, get_similarity(word, choice)) for choice in choices]
# sort, reverse, and return the top n (word,similarity) tuples
return sorted(similarities, key=lambda x: x[1])[::-1][:n]
else:
app.logger.info(f'Not in model vocab: "{word}"')
# if the requested label isn't in the model vocab, return a random genre
return [(get_random_word(), 1.0), (word, 0.0)]
def find_matching_quote(genre, top_n=5):
'''
Returns a matching quote and up to 5 of the most similar genres with similarity measures
    Parameters:
genre genre to match
Returns:
(str) Quote
(str) Author
    (list) List of tuples in the form (word (str), similarity (float))
'''
# find closest matches
matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)
# get the best one
closest = matched_genres[0][0]
app.logger.info(f'Finding quote for: "{closest}"')
# get a quote from that genre
matching_quote = q[q['GENRE']==closest].sample(1).iloc[0]
quote = matching_quote.QUOTE
author = matching_quote.AUTHOR
# return the quote and the genres
return quote, author, matched_genres
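A brief usage sketch for the module above, assuming it is importable as quotes and that the CSV, Flask app, and spaCy model it loads at import time are available (the genre strings here are illustrative):
# Hypothetical call sites; actual output depends on the quotes CSV.
import quotes

quote, author, matches = quotes.find_matching_quote('friendship', top_n=3)
print(f'"{quote}" - {author}')
for genre, score in matches:
    print(f'{genre}: {score:.2f}')

# A word outside the spaCy vocab falls back to a random genre with score 1.0:
print(quotes.get_closest_words('xyzzy', quotes.q.GENRE.unique()))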
|
flexible
|
{
"blob_id": "8f854f4f2c807f988945af4dc53dba93cfb31168",
"index": 9441,
"step-1": "<mask token>\n\n\ndef get_similarity(word1, word2):\n \"\"\"\n Returns a similarity score between two words\n \"\"\"\n tok1 = cache.get(word1, nlp(word1))\n tok2 = cache.get(word2, nlp(word2))\n return tok1.similarity(tok2)\n\n\n<mask token>\n\n\ndef get_closest_words(word, choices, n=1):\n \"\"\"\n Returns the n closest matches in the model vocab\n Parameters:\n word word to search\n choices available matches\n n number of results to return\n\n Returns:\n A list of n tuples in the form (word (str), similarity (float))\n \"\"\"\n app.logger.info(f'Finding closest words to \"{word}\"')\n if word in choices:\n return [(word, 1.0)]\n if word in nlp.vocab.strings:\n similarities = [(choice, get_similarity(word, choice)) for choice in\n choices]\n return sorted(similarities, key=lambda x: x[1])[::-1][:n]\n else:\n app.logger.info(f'Not in model vocab: \"{word}\"')\n return [(get_random_word(), 1.0), (word, 0.0)]\n\n\ndef find_matching_quote(genre, top_n=5):\n \"\"\"\n Returns a matching quote and up to 5 of the most similar genres with similarity measures\n Paramters:\n genre genre to match\n\n Returns:\n (str) Quote\n (str) Author\n (list) List of tuples in the form (word (str), simliarity (float))\n \"\"\"\n matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)\n closest = matched_genres[0][0]\n app.logger.info(f'Finding quote for: \"{closest}\"')\n matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]\n quote = matching_quote.QUOTE\n author = matching_quote.AUTHOR\n return quote, author, matched_genres\n",
"step-2": "<mask token>\nq['GENRE'].replace(to_replace=replace, inplace=True)\n<mask token>\n\n\ndef get_similarity(word1, word2):\n \"\"\"\n Returns a similarity score between two words\n \"\"\"\n tok1 = cache.get(word1, nlp(word1))\n tok2 = cache.get(word2, nlp(word2))\n return tok1.similarity(tok2)\n\n\ndef get_random_word():\n \"\"\"\n Returns a random category label from the data\n \"\"\"\n random_word = q['GENRE'].sample(1).iloc[0]\n return random_word\n\n\ndef get_closest_words(word, choices, n=1):\n \"\"\"\n Returns the n closest matches in the model vocab\n Parameters:\n word word to search\n choices available matches\n n number of results to return\n\n Returns:\n A list of n tuples in the form (word (str), similarity (float))\n \"\"\"\n app.logger.info(f'Finding closest words to \"{word}\"')\n if word in choices:\n return [(word, 1.0)]\n if word in nlp.vocab.strings:\n similarities = [(choice, get_similarity(word, choice)) for choice in\n choices]\n return sorted(similarities, key=lambda x: x[1])[::-1][:n]\n else:\n app.logger.info(f'Not in model vocab: \"{word}\"')\n return [(get_random_word(), 1.0), (word, 0.0)]\n\n\ndef find_matching_quote(genre, top_n=5):\n \"\"\"\n Returns a matching quote and up to 5 of the most similar genres with similarity measures\n Paramters:\n genre genre to match\n\n Returns:\n (str) Quote\n (str) Author\n (list) List of tuples in the form (word (str), simliarity (float))\n \"\"\"\n matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)\n closest = matched_genres[0][0]\n app.logger.info(f'Finding quote for: \"{closest}\"')\n matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]\n quote = matching_quote.QUOTE\n author = matching_quote.AUTHOR\n return quote, author, matched_genres\n",
"step-3": "<mask token>\nq = pd.read_csv(os.path.join('app', 'data', 'quotes_all.csv'), sep=';',\n skiprows=1, header=0)\nreplace = {'movingon': 'moving', 'fathersday': 'fathers', 'memorialday':\n 'memorial', 'mothersday': 'mothers', 'newyears': 'year',\n 'saintpatricksday': 'ireland', 'valentinesday': 'valentine'}\nq['GENRE'].replace(to_replace=replace, inplace=True)\n<mask token>\nnlp = spacy.load('en_core_web_md')\ncache = {genre: nlp(genre) for genre in q.GENRE.unique()}\n\n\ndef get_similarity(word1, word2):\n \"\"\"\n Returns a similarity score between two words\n \"\"\"\n tok1 = cache.get(word1, nlp(word1))\n tok2 = cache.get(word2, nlp(word2))\n return tok1.similarity(tok2)\n\n\ndef get_random_word():\n \"\"\"\n Returns a random category label from the data\n \"\"\"\n random_word = q['GENRE'].sample(1).iloc[0]\n return random_word\n\n\ndef get_closest_words(word, choices, n=1):\n \"\"\"\n Returns the n closest matches in the model vocab\n Parameters:\n word word to search\n choices available matches\n n number of results to return\n\n Returns:\n A list of n tuples in the form (word (str), similarity (float))\n \"\"\"\n app.logger.info(f'Finding closest words to \"{word}\"')\n if word in choices:\n return [(word, 1.0)]\n if word in nlp.vocab.strings:\n similarities = [(choice, get_similarity(word, choice)) for choice in\n choices]\n return sorted(similarities, key=lambda x: x[1])[::-1][:n]\n else:\n app.logger.info(f'Not in model vocab: \"{word}\"')\n return [(get_random_word(), 1.0), (word, 0.0)]\n\n\ndef find_matching_quote(genre, top_n=5):\n \"\"\"\n Returns a matching quote and up to 5 of the most similar genres with similarity measures\n Paramters:\n genre genre to match\n\n Returns:\n (str) Quote\n (str) Author\n (list) List of tuples in the form (word (str), simliarity (float))\n \"\"\"\n matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)\n closest = matched_genres[0][0]\n app.logger.info(f'Finding quote for: \"{closest}\"')\n matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]\n quote = matching_quote.QUOTE\n author = matching_quote.AUTHOR\n return quote, author, matched_genres\n",
"step-4": "import os\nfrom app import app\nimport pandas as pd\nq = pd.read_csv(os.path.join('app', 'data', 'quotes_all.csv'), sep=';',\n skiprows=1, header=0)\nreplace = {'movingon': 'moving', 'fathersday': 'fathers', 'memorialday':\n 'memorial', 'mothersday': 'mothers', 'newyears': 'year',\n 'saintpatricksday': 'ireland', 'valentinesday': 'valentine'}\nq['GENRE'].replace(to_replace=replace, inplace=True)\nimport spacy\nnlp = spacy.load('en_core_web_md')\ncache = {genre: nlp(genre) for genre in q.GENRE.unique()}\n\n\ndef get_similarity(word1, word2):\n \"\"\"\n Returns a similarity score between two words\n \"\"\"\n tok1 = cache.get(word1, nlp(word1))\n tok2 = cache.get(word2, nlp(word2))\n return tok1.similarity(tok2)\n\n\ndef get_random_word():\n \"\"\"\n Returns a random category label from the data\n \"\"\"\n random_word = q['GENRE'].sample(1).iloc[0]\n return random_word\n\n\ndef get_closest_words(word, choices, n=1):\n \"\"\"\n Returns the n closest matches in the model vocab\n Parameters:\n word word to search\n choices available matches\n n number of results to return\n\n Returns:\n A list of n tuples in the form (word (str), similarity (float))\n \"\"\"\n app.logger.info(f'Finding closest words to \"{word}\"')\n if word in choices:\n return [(word, 1.0)]\n if word in nlp.vocab.strings:\n similarities = [(choice, get_similarity(word, choice)) for choice in\n choices]\n return sorted(similarities, key=lambda x: x[1])[::-1][:n]\n else:\n app.logger.info(f'Not in model vocab: \"{word}\"')\n return [(get_random_word(), 1.0), (word, 0.0)]\n\n\ndef find_matching_quote(genre, top_n=5):\n \"\"\"\n Returns a matching quote and up to 5 of the most similar genres with similarity measures\n Paramters:\n genre genre to match\n\n Returns:\n (str) Quote\n (str) Author\n (list) List of tuples in the form (word (str), simliarity (float))\n \"\"\"\n matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)\n closest = matched_genres[0][0]\n app.logger.info(f'Finding quote for: \"{closest}\"')\n matching_quote = q[q['GENRE'] == closest].sample(1).iloc[0]\n quote = matching_quote.QUOTE\n author = matching_quote.AUTHOR\n return quote, author, matched_genres\n",
"step-5": "## Author: Aleem Juma\n\nimport os\nfrom app import app\nimport pandas as pd\n\n# read in the quotes database\nq = pd.read_csv(os.path.join('app','data','quotes_all.csv'), sep=';', skiprows=1, header=0)\n\n# there are a few quote genres that don't occur in the model vocab\n# replace them with appropriate words so the similarity search works\nreplace = {\n 'movingon':'moving',\n 'fathersday': 'fathers',\n 'memorialday': 'memorial',\n 'mothersday': 'mothers',\n 'newyears': 'year',\n 'saintpatricksday': 'ireland',\n 'valentinesday': 'valentine'\n}\nq['GENRE'].replace(to_replace=replace, inplace=True)\n\nimport spacy\nnlp = spacy.load('en_core_web_md')\n# cache the computed tokens for the genres in the dataset\ncache = {genre:nlp(genre) for genre in q.GENRE.unique()}\n\ndef get_similarity(word1, word2):\n '''\n Returns a similarity score between two words\n '''\n tok1 = cache.get(word1, nlp(word1))\n tok2 = cache.get(word2, nlp(word2))\n return tok1.similarity(tok2)\n\ndef get_random_word():\n '''\n Returns a random category label from the data\n '''\n random_word = q['GENRE'].sample(1).iloc[0]\n return random_word\n\ndef get_closest_words(word, choices, n=1):\n '''\n Returns the n closest matches in the model vocab\n Parameters:\n word word to search\n choices available matches\n n number of results to return\n\n Returns:\n A list of n tuples in the form (word (str), similarity (float))\n '''\n app.logger.info(f'Finding closest words to \"{word}\"')\n if word in choices:\n # if the word is already in the list return the same word with 100% match\n return [(word, 1.0)]\n if word in nlp.vocab.strings:\n # if not in the list, find the closest words\n similarities = [(choice, get_similarity(word, choice)) for choice in choices]\n # sort, reverse, and return the top n (word,similarity) tuples\n return sorted(similarities, key=lambda x: x[1])[::-1][:n]\n else:\n app.logger.info(f'Not in model vocab: \"{word}\"')\n # if the requested label isn't in the model vocab, return a random genre\n return [(get_random_word(), 1.0), (word, 0.0)]\n\ndef find_matching_quote(genre, top_n=5):\n '''\n Returns a matching quote and up to 5 of the most similar genres with similarity measures\n Paramters:\n genre genre to match\n\n Returns:\n (str) Quote\n (str) Author\n (list) List of tuples in the form (word (str), simliarity (float))\n '''\n # find closest matches\n matched_genres = get_closest_words(genre, q.GENRE.unique(), top_n)\n # get the best one\n closest = matched_genres[0][0]\n app.logger.info(f'Finding quote for: \"{closest}\"')\n # get a quote from that genre\n matching_quote = q[q['GENRE']==closest].sample(1).iloc[0]\n quote = matching_quote.QUOTE\n author = matching_quote.AUTHOR\n # return the quote and the genres\n return quote, author, matched_genres\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
import itertools
import numpy as np
SAMPLER_CACHE = 10000
def cache_gen(source):
values = source()
while True:
for value in values:
yield value
values = source()
class Sampler:
    """Provides precomputed random samples of various distributions."""
randn_gen = cache_gen(lambda : np.random.standard_normal(SAMPLER_CACHE))
rand_gen = cache_gen(lambda : np.random.random(SAMPLER_CACHE))
@classmethod
def standard_normal(cls, size=1):
return list(itertools.islice(cls.randn_gen, size))
@classmethod
def randn(cls):
return next(cls.randn_gen)
@classmethod
def rand(cls):
return next(cls.rand_gen)
@classmethod
def rint(cls, max_exclusive):
return np.random.randint(max_exclusive)
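A short usage sketch of the Sampler above; the generators refill transparently every SAMPLER_CACHE values, so repeated scalar draws avoid per-call numpy overhead:
# Illustrative draws from the precomputed pools.
z = Sampler.randn()                      # one standard-normal sample
u = Sampler.rand()                       # one uniform sample in [0, 1)
batch = Sampler.standard_normal(size=5)  # list of 5 standard-normal samples
i = Sampler.rint(10)                     # integer in [0, 10), not cached
print(z, u, batch, i)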
|
normal
|
{
"blob_id": "ddeff852e41b79fb71cea1e4dc71248ddef85d79",
"index": 7033,
"step-1": "<mask token>\n\n\nclass Sampler:\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def standard_normal(cls, size=1):\n return list(itertools.islice(cls.randn_gen, size))\n\n @classmethod\n def randn(cls):\n return next(cls.randn_gen)\n\n @classmethod\n def rand(cls):\n return next(cls.rand_gen)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Sampler:\n \"\"\"Provides precomputed random samples of various distribution.\"\"\"\n randn_gen = cache_gen(lambda : np.random.standard_normal(SAMPLER_CACHE))\n rand_gen = cache_gen(lambda : np.random.random(SAMPLER_CACHE))\n\n @classmethod\n def standard_normal(cls, size=1):\n return list(itertools.islice(cls.randn_gen, size))\n\n @classmethod\n def randn(cls):\n return next(cls.randn_gen)\n\n @classmethod\n def rand(cls):\n return next(cls.rand_gen)\n\n @classmethod\n def rint(cls, max_exclusive):\n return np.random.randint(max_exclusive)\n",
"step-3": "<mask token>\nSAMPLER_CACHE = 10000\n\n\ndef cache_gen(source):\n values = source()\n while True:\n for value in values:\n yield value\n values = source()\n\n\nclass Sampler:\n \"\"\"Provides precomputed random samples of various distribution.\"\"\"\n randn_gen = cache_gen(lambda : np.random.standard_normal(SAMPLER_CACHE))\n rand_gen = cache_gen(lambda : np.random.random(SAMPLER_CACHE))\n\n @classmethod\n def standard_normal(cls, size=1):\n return list(itertools.islice(cls.randn_gen, size))\n\n @classmethod\n def randn(cls):\n return next(cls.randn_gen)\n\n @classmethod\n def rand(cls):\n return next(cls.rand_gen)\n\n @classmethod\n def rint(cls, max_exclusive):\n return np.random.randint(max_exclusive)\n",
"step-4": "import itertools\nimport numpy as np\nSAMPLER_CACHE = 10000\n\n\ndef cache_gen(source):\n values = source()\n while True:\n for value in values:\n yield value\n values = source()\n\n\nclass Sampler:\n \"\"\"Provides precomputed random samples of various distribution.\"\"\"\n randn_gen = cache_gen(lambda : np.random.standard_normal(SAMPLER_CACHE))\n rand_gen = cache_gen(lambda : np.random.random(SAMPLER_CACHE))\n\n @classmethod\n def standard_normal(cls, size=1):\n return list(itertools.islice(cls.randn_gen, size))\n\n @classmethod\n def randn(cls):\n return next(cls.randn_gen)\n\n @classmethod\n def rand(cls):\n return next(cls.rand_gen)\n\n @classmethod\n def rint(cls, max_exclusive):\n return np.random.randint(max_exclusive)\n",
"step-5": null,
"step-ids": [
4,
7,
9,
10
]
}
|
[
4,
7,
9,
10
] |
def encrypt(key,plaintext):
ciphertext=""
for i in plaintext:
if i.isalpha():
alphabet = ord(i)+key
if alphabet > ord("Z"):
alphabet -= 26
letter = chr(alphabet)
ciphertext+=letter
return ciphertext
def decrypt(key,ciphertext):
plaintext=""
for i in ciphertext:
if i.isalpha():
alphabet = ord(i)-key
if alphabet < ord("A"):
alphabet += 26
letter = chr(alphabet)
plaintext+=letter
return plaintext
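A quick round-trip sketch; note that as written both functions expect uppercase letters and silently drop anything non-alphabetic:
# Caesar-cipher round trip with a shift of 3 (uppercase only).
secret = encrypt(3, "HELLO WORLD")   # -> "KHOORZRUOG" (the space is dropped)
restored = decrypt(3, secret)        # -> "HELLOWORLD"
print(secret, restored)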
|
normal
|
{
"blob_id": "ac31cba94ee8ff7a2903a675954c937c567b5a56",
"index": 6739,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef decrypt(key, ciphertext):\n plaintext = ''\n for i in ciphertext:\n if i.isalpha():\n alphabet = ord(i) - key\n if alphabet < ord('A'):\n alphabet += 26\n letter = chr(alphabet)\n plaintext += letter\n return plaintext\n",
"step-3": "def encrypt(key, plaintext):\n ciphertext = ''\n for i in plaintext:\n if i.isalpha():\n alphabet = ord(i) + key\n if alphabet > ord('Z'):\n alphabet -= 26\n letter = chr(alphabet)\n ciphertext += letter\n return ciphertext\n\n\ndef decrypt(key, ciphertext):\n plaintext = ''\n for i in ciphertext:\n if i.isalpha():\n alphabet = ord(i) - key\n if alphabet < ord('A'):\n alphabet += 26\n letter = chr(alphabet)\n plaintext += letter\n return plaintext\n",
"step-4": "\ndef encrypt(key,plaintext):\n ciphertext=\"\"\n\n for i in plaintext:\n if i.isalpha():\n alphabet = ord(i)+key\n if alphabet > ord(\"Z\"):\n alphabet -= 26\n letter = chr(alphabet)\n ciphertext+=letter\n\n return ciphertext\n\ndef decrypt(key,ciphertext):\n plaintext=\"\"\n for i in ciphertext:\n if i.isalpha():\n alphabet = ord(i)-key\n if alphabet < ord(\"A\"):\n alphabet += 26\n letter = chr(alphabet)\n plaintext+=letter\n\n return plaintext\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Net(nn.Module):
def __init__(self, input_size, hidden_size=40, num_classes=10,
bidirectional=False):
super().__init__()
self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional
)
out_size = hidden_size if not bidirectional else 2 * hidden_size
self.clf = nn.Linear(out_size, num_classes)
def forward(self, x, lengths):
_, x, _ = self.encoder(x, lengths)
out = self.clf(x)
return out
def get_parser():
parser = ArgumentParser('MNIST classification example')
parser.add_argument('--hidden', dest='model.hidden_size', type=int,
help='Intermediate hidden layers for linear module')
parser.add_argument('--bi', dest='model.bidirectional', action=
'store_true', help='Use BiLSTM')
return parser
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Net(nn.Module):
def __init__(self, input_size, hidden_size=40, num_classes=10,
bidirectional=False):
super().__init__()
self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional
)
out_size = hidden_size if not bidirectional else 2 * hidden_size
self.clf = nn.Linear(out_size, num_classes)
def forward(self, x, lengths):
_, x, _ = self.encoder(x, lengths)
out = self.clf(x)
return out
def get_parser():
parser = ArgumentParser('MNIST classification example')
parser.add_argument('--hidden', dest='model.hidden_size', type=int,
help='Intermediate hidden layers for linear module')
parser.add_argument('--bi', dest='model.bidirectional', action=
'store_true', help='Use BiLSTM')
return parser
def get_data():
MNIST.resources = [(
'https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz'
, 'f68b3c2dcbeaaa9fbdd348bbdeb94873'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz'
, 'd53e105ee54ea40749a09fcbcd1e9432'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz'
, '9fb629c4189551a2d022fa330f9573f3'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz'
, 'ec29112dd5afa0611ce80d1b7f02629c')]
def squeeze(x):
return x.squeeze()
data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)),
squeeze])
train = MNIST(download=True, root='.', transform=data_transform, train=True
)
val = MNIST(download=False, root='.', transform=data_transform, train=False
)
return train, val
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Net(nn.Module):
def __init__(self, input_size, hidden_size=40, num_classes=10,
bidirectional=False):
super().__init__()
self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional
)
out_size = hidden_size if not bidirectional else 2 * hidden_size
self.clf = nn.Linear(out_size, num_classes)
def forward(self, x, lengths):
_, x, _ = self.encoder(x, lengths)
out = self.clf(x)
return out
def get_parser():
parser = ArgumentParser('MNIST classification example')
parser.add_argument('--hidden', dest='model.hidden_size', type=int,
help='Intermediate hidden layers for linear module')
parser.add_argument('--bi', dest='model.bidirectional', action=
'store_true', help='Use BiLSTM')
return parser
def get_data():
MNIST.resources = [(
'https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz'
, 'f68b3c2dcbeaaa9fbdd348bbdeb94873'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz'
, 'd53e105ee54ea40749a09fcbcd1e9432'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz'
, '9fb629c4189551a2d022fa330f9573f3'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz'
, 'ec29112dd5afa0611ce80d1b7f02629c')]
def squeeze(x):
return x.squeeze()
data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)),
squeeze])
train = MNIST(download=True, root='.', transform=data_transform, train=True
)
val = MNIST(download=False, root='.', transform=data_transform, train=False
)
return train, val
if __name__ == '__main__':
parser = get_parser()
parser = make_cli_parser(parser, PLDataModuleFromDatasets)
config = parse_config(parser, parser.parse_args().config)
if config.trainer.experiment_name == 'experiment':
config.trainer.experiment_name = 'mnist-rnn-classification'
configure_logging(f'logs/{config.trainer.experiment_name}')
if config.seed is not None:
        logger.info(f'Seeding everything with seed={config.seed}')
pl.utilities.seed.seed_everything(seed=config.seed)
train, test = get_data()
ldm = PLDataModuleFromDatasets(train, test=test, seed=config.seed,
collate_fn=collate_fn, **config.data)
model = Net(28, **config.model)
optimizer = getattr(optim, config.optimizer)(model.parameters(), **
config.optim)
criterion = nn.CrossEntropyLoss()
lr_scheduler = None
if config.lr_scheduler:
lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, **
config.lr_schedule)
lm = RnnPLModule(model, optimizer, criterion, lr_scheduler=lr_scheduler,
metrics={'acc': FromLogits(pl.metrics.classification.Accuracy())},
hparams=config)
if config.debug:
logger.info('Running in debug mode: Fast run on 5 batches')
trainer = make_trainer(fast_dev_run=5)
trainer.fit(lm, datamodule=ldm)
logger.info('Running in debug mode: Overfitting 5 batches')
trainer = make_trainer(overfit_batches=5)
trainer.fit(lm, datamodule=ldm)
else:
trainer = make_trainer(**config.trainer)
watch_model(trainer, model)
trainer.fit(lm, datamodule=ldm)
trainer.test(ckpt_path='best', test_dataloaders=ldm.test_dataloader())
logger.info('Run finished. Uploading files to wandb...')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
collate_fn = SequenceClassificationCollator()
class Net(nn.Module):
def __init__(self, input_size, hidden_size=40, num_classes=10,
bidirectional=False):
super().__init__()
self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional
)
out_size = hidden_size if not bidirectional else 2 * hidden_size
self.clf = nn.Linear(out_size, num_classes)
def forward(self, x, lengths):
_, x, _ = self.encoder(x, lengths)
out = self.clf(x)
return out
def get_parser():
parser = ArgumentParser('MNIST classification example')
parser.add_argument('--hidden', dest='model.hidden_size', type=int,
help='Intermediate hidden layers for linear module')
parser.add_argument('--bi', dest='model.bidirectional', action=
'store_true', help='Use BiLSTM')
return parser
def get_data():
MNIST.resources = [(
'https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz'
, 'f68b3c2dcbeaaa9fbdd348bbdeb94873'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz'
, 'd53e105ee54ea40749a09fcbcd1e9432'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz'
, '9fb629c4189551a2d022fa330f9573f3'), (
'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz'
, 'ec29112dd5afa0611ce80d1b7f02629c')]
def squeeze(x):
return x.squeeze()
data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)),
squeeze])
train = MNIST(download=True, root='.', transform=data_transform, train=True
)
val = MNIST(download=False, root='.', transform=data_transform, train=False
)
return train, val
if __name__ == '__main__':
parser = get_parser()
parser = make_cli_parser(parser, PLDataModuleFromDatasets)
config = parse_config(parser, parser.parse_args().config)
if config.trainer.experiment_name == 'experiment':
config.trainer.experiment_name = 'mnist-rnn-classification'
configure_logging(f'logs/{config.trainer.experiment_name}')
if config.seed is not None:
        logger.info(f'Seeding everything with seed={config.seed}')
pl.utilities.seed.seed_everything(seed=config.seed)
train, test = get_data()
ldm = PLDataModuleFromDatasets(train, test=test, seed=config.seed,
collate_fn=collate_fn, **config.data)
model = Net(28, **config.model)
optimizer = getattr(optim, config.optimizer)(model.parameters(), **
config.optim)
criterion = nn.CrossEntropyLoss()
lr_scheduler = None
if config.lr_scheduler:
lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, **
config.lr_schedule)
lm = RnnPLModule(model, optimizer, criterion, lr_scheduler=lr_scheduler,
metrics={'acc': FromLogits(pl.metrics.classification.Accuracy())},
hparams=config)
if config.debug:
logger.info('Running in debug mode: Fast run on 5 batches')
trainer = make_trainer(fast_dev_run=5)
trainer.fit(lm, datamodule=ldm)
logger.info('Running in debug mode: Overfitting 5 batches')
trainer = make_trainer(overfit_batches=5)
trainer.fit(lm, datamodule=ldm)
else:
trainer = make_trainer(**config.trainer)
watch_model(trainer, model)
trainer.fit(lm, datamodule=ldm)
trainer.test(ckpt_path='best', test_dataloaders=ldm.test_dataloader())
logger.info('Run finished. Uploading files to wandb...')
<|reserved_special_token_1|>
# python examples/mnist_rnn.py --bsz 128 --bsz-eval 256
import sys
from argparse import ArgumentParser
import pytorch_lightning as pl
import torch.nn as nn
import torch.optim as optim
from loguru import logger
from slp.config.config_parser import make_cli_parser, parse_config
from slp.data.collators import SequenceClassificationCollator
from slp.modules.rnn import RNN
from slp.plbind import (
FromLogits,
PLDataModuleFromDatasets,
RnnPLModule,
make_trainer,
watch_model,
)
from slp.util.log import configure_logging
from torchvision.datasets import MNIST # type: ignore
from torchvision.transforms import Compose, Normalize, ToTensor # type: ignore
collate_fn = SequenceClassificationCollator()
class Net(nn.Module):
def __init__(self, input_size, hidden_size=40, num_classes=10, bidirectional=False):
super().__init__()
self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional)
out_size = hidden_size if not bidirectional else 2 * hidden_size
self.clf = nn.Linear(out_size, num_classes)
def forward(self, x, lengths):
_, x, _ = self.encoder(x, lengths)
out = self.clf(x)
return out
def get_parser():
parser = ArgumentParser("MNIST classification example")
parser.add_argument(
"--hidden",
dest="model.hidden_size",
type=int,
help="Intermediate hidden layers for linear module",
)
parser.add_argument(
"--bi",
dest="model.bidirectional",
action="store_true",
help="Use BiLSTM",
)
return parser
def get_data():
# Fix: https://stackoverflow.com/a/66820249
MNIST.resources = [
(
"https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz",
"f68b3c2dcbeaaa9fbdd348bbdeb94873",
),
(
"https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz",
"d53e105ee54ea40749a09fcbcd1e9432",
),
(
"https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz",
"9fb629c4189551a2d022fa330f9573f3",
),
(
"https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz",
"ec29112dd5afa0611ce80d1b7f02629c",
),
]
def squeeze(x):
return x.squeeze()
data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)), squeeze])
train = MNIST(download=True, root=".", transform=data_transform, train=True)
val = MNIST(download=False, root=".", transform=data_transform, train=False)
return train, val
if __name__ == "__main__":
# SETUP ##################################################
parser = get_parser()
parser = make_cli_parser(parser, PLDataModuleFromDatasets)
config = parse_config(parser, parser.parse_args().config)
if config.trainer.experiment_name == "experiment":
config.trainer.experiment_name = "mnist-rnn-classification"
configure_logging(f"logs/{config.trainer.experiment_name}")
if config.seed is not None:
        logger.info(f"Seeding everything with seed={config.seed}")
pl.utilities.seed.seed_everything(seed=config.seed)
train, test = get_data()
# Get data and make datamodule ##########################
ldm = PLDataModuleFromDatasets(
train, test=test, seed=config.seed, collate_fn=collate_fn, **config.data
)
# Create model, optimizer, criterion, scheduler ###########
model = Net(28, **config.model)
optimizer = getattr(optim, config.optimizer)(model.parameters(), **config.optim)
criterion = nn.CrossEntropyLoss()
lr_scheduler = None
if config.lr_scheduler:
lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(
optimizer, **config.lr_schedule
)
# Wrap in PLModule, & configure metrics ####################
lm = RnnPLModule(
model,
optimizer,
criterion,
lr_scheduler=lr_scheduler,
metrics={"acc": FromLogits(pl.metrics.classification.Accuracy())},
hparams=config,
)
# Run debugging session or fit & test the model ############
if config.debug:
logger.info("Running in debug mode: Fast run on 5 batches")
trainer = make_trainer(fast_dev_run=5)
trainer.fit(lm, datamodule=ldm)
logger.info("Running in debug mode: Overfitting 5 batches")
trainer = make_trainer(overfit_batches=5)
trainer.fit(lm, datamodule=ldm)
else:
trainer = make_trainer(**config.trainer)
watch_model(trainer, model)
trainer.fit(lm, datamodule=ldm)
trainer.test(ckpt_path="best", test_dataloaders=ldm.test_dataloader())
logger.info("Run finished. Uploading files to wandb...")
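To make the data flow concrete: the collator feeds each 28x28 MNIST image to Net(28, ...) as a length-28 sequence of 28-dimensional rows. A minimal shape-check sketch, using a plain nn.LSTM as a stand-in for slp.modules.rnn.RNN (whose exact interface is not shown here):
# Illustrative only: shows the (batch, seq_len, features) contract the model expects.
import torch
import torch.nn as nn

images = torch.randn(4, 28, 28)            # 4 images, 28 rows of 28 pixels each
lengths = torch.tensor([28, 28, 28, 28])   # every "sequence" uses all 28 rows

lstm = nn.LSTM(input_size=28, hidden_size=40, batch_first=True)
clf = nn.Linear(40, 10)

_, (h_n, _) = lstm(images)                 # h_n: (1, batch, hidden)
logits = clf(h_n[-1])                      # last hidden state -> 10 class logits
print(logits.shape)                        # torch.Size([4, 10])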
|
flexible
|
{
"blob_id": "d8a09f9952856da69120fae6221636dd5bd8c93e",
"index": 3567,
"step-1": "<mask token>\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size=40, num_classes=10,\n bidirectional=False):\n super().__init__()\n self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional\n )\n out_size = hidden_size if not bidirectional else 2 * hidden_size\n self.clf = nn.Linear(out_size, num_classes)\n\n def forward(self, x, lengths):\n _, x, _ = self.encoder(x, lengths)\n out = self.clf(x)\n return out\n\n\ndef get_parser():\n parser = ArgumentParser('MNIST classification example')\n parser.add_argument('--hidden', dest='model.hidden_size', type=int,\n help='Intermediate hidden layers for linear module')\n parser.add_argument('--bi', dest='model.bidirectional', action=\n 'store_true', help='Use BiLSTM')\n return parser\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size=40, num_classes=10,\n bidirectional=False):\n super().__init__()\n self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional\n )\n out_size = hidden_size if not bidirectional else 2 * hidden_size\n self.clf = nn.Linear(out_size, num_classes)\n\n def forward(self, x, lengths):\n _, x, _ = self.encoder(x, lengths)\n out = self.clf(x)\n return out\n\n\ndef get_parser():\n parser = ArgumentParser('MNIST classification example')\n parser.add_argument('--hidden', dest='model.hidden_size', type=int,\n help='Intermediate hidden layers for linear module')\n parser.add_argument('--bi', dest='model.bidirectional', action=\n 'store_true', help='Use BiLSTM')\n return parser\n\n\ndef get_data():\n MNIST.resources = [(\n 'https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz'\n , 'f68b3c2dcbeaaa9fbdd348bbdeb94873'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz'\n , 'd53e105ee54ea40749a09fcbcd1e9432'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz'\n , '9fb629c4189551a2d022fa330f9573f3'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz'\n , 'ec29112dd5afa0611ce80d1b7f02629c')]\n\n def squeeze(x):\n return x.squeeze()\n data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)),\n squeeze])\n train = MNIST(download=True, root='.', transform=data_transform, train=True\n )\n val = MNIST(download=False, root='.', transform=data_transform, train=False\n )\n return train, val\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size=40, num_classes=10,\n bidirectional=False):\n super().__init__()\n self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional\n )\n out_size = hidden_size if not bidirectional else 2 * hidden_size\n self.clf = nn.Linear(out_size, num_classes)\n\n def forward(self, x, lengths):\n _, x, _ = self.encoder(x, lengths)\n out = self.clf(x)\n return out\n\n\ndef get_parser():\n parser = ArgumentParser('MNIST classification example')\n parser.add_argument('--hidden', dest='model.hidden_size', type=int,\n help='Intermediate hidden layers for linear module')\n parser.add_argument('--bi', dest='model.bidirectional', action=\n 'store_true', help='Use BiLSTM')\n return parser\n\n\ndef get_data():\n MNIST.resources = [(\n 'https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz'\n , 'f68b3c2dcbeaaa9fbdd348bbdeb94873'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz'\n , 'd53e105ee54ea40749a09fcbcd1e9432'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz'\n , '9fb629c4189551a2d022fa330f9573f3'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz'\n , 'ec29112dd5afa0611ce80d1b7f02629c')]\n\n def squeeze(x):\n return x.squeeze()\n data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)),\n squeeze])\n train = MNIST(download=True, root='.', transform=data_transform, train=True\n )\n val = MNIST(download=False, root='.', transform=data_transform, train=False\n )\n return train, val\n\n\nif __name__ == '__main__':\n parser = get_parser()\n parser = make_cli_parser(parser, PLDataModuleFromDatasets)\n config = parse_config(parser, parser.parse_args().config)\n if config.trainer.experiment_name == 'experiment':\n config.trainer.experiment_name = 'mnist-rnn-classification'\n configure_logging(f'logs/{config.trainer.experiment_name}')\n if config.seed is not None:\n logger.info('Seeding everything with seed={seed}')\n pl.utilities.seed.seed_everything(seed=config.seed)\n train, test = get_data()\n ldm = PLDataModuleFromDatasets(train, test=test, seed=config.seed,\n collate_fn=collate_fn, **config.data)\n model = Net(28, **config.model)\n optimizer = getattr(optim, config.optimizer)(model.parameters(), **\n config.optim)\n criterion = nn.CrossEntropyLoss()\n lr_scheduler = None\n if config.lr_scheduler:\n lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, **\n config.lr_schedule)\n lm = RnnPLModule(model, optimizer, criterion, lr_scheduler=lr_scheduler,\n metrics={'acc': FromLogits(pl.metrics.classification.Accuracy())},\n hparams=config)\n if config.debug:\n logger.info('Running in debug mode: Fast run on 5 batches')\n trainer = make_trainer(fast_dev_run=5)\n trainer.fit(lm, datamodule=ldm)\n logger.info('Running in debug mode: Overfitting 5 batches')\n trainer = make_trainer(overfit_batches=5)\n trainer.fit(lm, datamodule=ldm)\n else:\n trainer = make_trainer(**config.trainer)\n watch_model(trainer, model)\n trainer.fit(lm, datamodule=ldm)\n trainer.test(ckpt_path='best', test_dataloaders=ldm.test_dataloader())\n logger.info('Run finished. Uploading files to wandb...')\n",
"step-4": "<mask token>\ncollate_fn = SequenceClassificationCollator()\n\n\nclass Net(nn.Module):\n\n def __init__(self, input_size, hidden_size=40, num_classes=10,\n bidirectional=False):\n super().__init__()\n self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional\n )\n out_size = hidden_size if not bidirectional else 2 * hidden_size\n self.clf = nn.Linear(out_size, num_classes)\n\n def forward(self, x, lengths):\n _, x, _ = self.encoder(x, lengths)\n out = self.clf(x)\n return out\n\n\ndef get_parser():\n parser = ArgumentParser('MNIST classification example')\n parser.add_argument('--hidden', dest='model.hidden_size', type=int,\n help='Intermediate hidden layers for linear module')\n parser.add_argument('--bi', dest='model.bidirectional', action=\n 'store_true', help='Use BiLSTM')\n return parser\n\n\ndef get_data():\n MNIST.resources = [(\n 'https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz'\n , 'f68b3c2dcbeaaa9fbdd348bbdeb94873'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz'\n , 'd53e105ee54ea40749a09fcbcd1e9432'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz'\n , '9fb629c4189551a2d022fa330f9573f3'), (\n 'https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz'\n , 'ec29112dd5afa0611ce80d1b7f02629c')]\n\n def squeeze(x):\n return x.squeeze()\n data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)),\n squeeze])\n train = MNIST(download=True, root='.', transform=data_transform, train=True\n )\n val = MNIST(download=False, root='.', transform=data_transform, train=False\n )\n return train, val\n\n\nif __name__ == '__main__':\n parser = get_parser()\n parser = make_cli_parser(parser, PLDataModuleFromDatasets)\n config = parse_config(parser, parser.parse_args().config)\n if config.trainer.experiment_name == 'experiment':\n config.trainer.experiment_name = 'mnist-rnn-classification'\n configure_logging(f'logs/{config.trainer.experiment_name}')\n if config.seed is not None:\n logger.info('Seeding everything with seed={seed}')\n pl.utilities.seed.seed_everything(seed=config.seed)\n train, test = get_data()\n ldm = PLDataModuleFromDatasets(train, test=test, seed=config.seed,\n collate_fn=collate_fn, **config.data)\n model = Net(28, **config.model)\n optimizer = getattr(optim, config.optimizer)(model.parameters(), **\n config.optim)\n criterion = nn.CrossEntropyLoss()\n lr_scheduler = None\n if config.lr_scheduler:\n lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, **\n config.lr_schedule)\n lm = RnnPLModule(model, optimizer, criterion, lr_scheduler=lr_scheduler,\n metrics={'acc': FromLogits(pl.metrics.classification.Accuracy())},\n hparams=config)\n if config.debug:\n logger.info('Running in debug mode: Fast run on 5 batches')\n trainer = make_trainer(fast_dev_run=5)\n trainer.fit(lm, datamodule=ldm)\n logger.info('Running in debug mode: Overfitting 5 batches')\n trainer = make_trainer(overfit_batches=5)\n trainer.fit(lm, datamodule=ldm)\n else:\n trainer = make_trainer(**config.trainer)\n watch_model(trainer, model)\n trainer.fit(lm, datamodule=ldm)\n trainer.test(ckpt_path='best', test_dataloaders=ldm.test_dataloader())\n logger.info('Run finished. Uploading files to wandb...')\n",
"step-5": "# python examples/mnist_rnn.py --bsz 128 --bsz-eval 256\n\nimport sys\nfrom argparse import ArgumentParser\n\nimport pytorch_lightning as pl\nimport torch.nn as nn\nimport torch.optim as optim\nfrom loguru import logger\nfrom slp.config.config_parser import make_cli_parser, parse_config\nfrom slp.data.collators import SequenceClassificationCollator\nfrom slp.modules.rnn import RNN\nfrom slp.plbind import (\n FromLogits,\n PLDataModuleFromDatasets,\n RnnPLModule,\n make_trainer,\n watch_model,\n)\nfrom slp.util.log import configure_logging\nfrom torchvision.datasets import MNIST # type: ignore\nfrom torchvision.transforms import Compose, Normalize, ToTensor # type: ignore\n\ncollate_fn = SequenceClassificationCollator()\n\n\nclass Net(nn.Module):\n def __init__(self, input_size, hidden_size=40, num_classes=10, bidirectional=False):\n super().__init__()\n self.encoder = RNN(input_size, hidden_size, bidirectional=bidirectional)\n out_size = hidden_size if not bidirectional else 2 * hidden_size\n self.clf = nn.Linear(out_size, num_classes)\n\n def forward(self, x, lengths):\n _, x, _ = self.encoder(x, lengths)\n out = self.clf(x)\n\n return out\n\n\ndef get_parser():\n parser = ArgumentParser(\"MNIST classification example\")\n parser.add_argument(\n \"--hidden\",\n dest=\"model.hidden_size\",\n type=int,\n help=\"Intermediate hidden layers for linear module\",\n )\n parser.add_argument(\n \"--bi\",\n dest=\"model.bidirectional\",\n action=\"store_true\",\n help=\"Use BiLSTM\",\n )\n\n return parser\n\n\ndef get_data():\n # Fix: https://stackoverflow.com/a/66820249\n MNIST.resources = [\n (\n \"https://ossci-datasets.s3.amazonaws.com/mnist/train-images-idx3-ubyte.gz\",\n \"f68b3c2dcbeaaa9fbdd348bbdeb94873\",\n ),\n (\n \"https://ossci-datasets.s3.amazonaws.com/mnist/train-labels-idx1-ubyte.gz\",\n \"d53e105ee54ea40749a09fcbcd1e9432\",\n ),\n (\n \"https://ossci-datasets.s3.amazonaws.com/mnist/t10k-images-idx3-ubyte.gz\",\n \"9fb629c4189551a2d022fa330f9573f3\",\n ),\n (\n \"https://ossci-datasets.s3.amazonaws.com/mnist/t10k-labels-idx1-ubyte.gz\",\n \"ec29112dd5afa0611ce80d1b7f02629c\",\n ),\n ]\n\n def squeeze(x):\n return x.squeeze()\n\n data_transform = Compose([ToTensor(), Normalize((0.1307,), (0.3081,)), squeeze])\n train = MNIST(download=True, root=\".\", transform=data_transform, train=True)\n\n val = MNIST(download=False, root=\".\", transform=data_transform, train=False)\n\n return train, val\n\n\nif __name__ == \"__main__\":\n # SETUP ##################################################\n parser = get_parser()\n parser = make_cli_parser(parser, PLDataModuleFromDatasets)\n\n config = parse_config(parser, parser.parse_args().config)\n\n if config.trainer.experiment_name == \"experiment\":\n config.trainer.experiment_name = \"mnist-rnn-classification\"\n\n configure_logging(f\"logs/{config.trainer.experiment_name}\")\n\n if config.seed is not None:\n logger.info(\"Seeding everything with seed={seed}\")\n pl.utilities.seed.seed_everything(seed=config.seed)\n\n train, test = get_data()\n\n # Get data and make datamodule ##########################\n ldm = PLDataModuleFromDatasets(\n train, test=test, seed=config.seed, collate_fn=collate_fn, **config.data\n )\n\n # Create model, optimizer, criterion, scheduler ###########\n model = Net(28, **config.model)\n\n optimizer = getattr(optim, config.optimizer)(model.parameters(), **config.optim)\n criterion = nn.CrossEntropyLoss()\n\n lr_scheduler = None\n\n if config.lr_scheduler:\n lr_scheduler = optim.lr_scheduler.ReduceLROnPlateau(\n 
optimizer, **config.lr_schedule\n )\n\n # Wrap in PLModule, & configure metrics ####################\n lm = RnnPLModule(\n model,\n optimizer,\n criterion,\n lr_scheduler=lr_scheduler,\n metrics={\"acc\": FromLogits(pl.metrics.classification.Accuracy())},\n hparams=config,\n )\n\n # Run debugging session or fit & test the model ############\n\n if config.debug:\n logger.info(\"Running in debug mode: Fast run on 5 batches\")\n trainer = make_trainer(fast_dev_run=5)\n trainer.fit(lm, datamodule=ldm)\n\n logger.info(\"Running in debug mode: Overfitting 5 batches\")\n trainer = make_trainer(overfit_batches=5)\n trainer.fit(lm, datamodule=ldm)\n\n else:\n trainer = make_trainer(**config.trainer)\n watch_model(trainer, model)\n\n trainer.fit(lm, datamodule=ldm)\n\n trainer.test(ckpt_path=\"best\", test_dataloaders=ldm.test_dataloader())\n\n logger.info(\"Run finished. Uploading files to wandb...\")\n",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
from botocore_eb.model import ServiceModel
from botocore_eb.exceptions import ParamValidationError
from botocore_eb.exceptions import DataNotFoundError
from botocore_eb.exceptions import OperationNotPageableError
from botocore_eb import xform_name
from botocore_eb.paginate import Paginator
import botocore_eb.validate
import botocore_eb.serialize
class ClientError(Exception):
MSG_TEMPLATE = (
'An error occurred ({error_code}) when calling the {operation_name} '
'operation: {error_message}')
def __init__(self, error_response, operation_name):
msg = self.MSG_TEMPLATE.format(
error_code=error_response['Error']['Code'],
error_message=error_response['Error']['Message'],
operation_name=operation_name)
super(ClientError, self).__init__(msg)
self.response = error_response
class ClientCreator(object):
"""Creates client objects for a service."""
def __init__(self, loader, endpoint_creator):
self._loader = loader
self._endpoint_creator = endpoint_creator
def create_client(self, service_name, region_name, is_secure=True,
endpoint_url=None, verify=None):
service_model = self._load_service_model(service_name)
cls = self.create_client_class(service_name)
client_args = self._get_client_args(service_model, region_name, is_secure,
endpoint_url, verify)
return cls(**client_args)
def create_client_class(self, service_name):
service_model = self._load_service_model(service_name)
methods = self._create_methods(service_model)
py_name_to_operation_name = self._create_name_mapping(service_model)
self._add_pagination_methods(service_model, methods,
py_name_to_operation_name)
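        # Build the client class dynamically, attaching one Python method per API operation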
cls = type(service_name, (BaseClient,), methods)
return cls
def _add_pagination_methods(self, service_model, methods, name_mapping):
loader = self._loader
def get_paginator(self, operation_name):
"""Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you'd normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator("create_foo")``.
:raise OperationNotPageableError: Raised if the operation is not
pageable. You can use the ``client.can_paginate`` method to
check if an operation is pageable.
:rtype: L{botocore.paginate.Paginator}
:return: A paginator object.
"""
# Note that the 'self' in this method refers to the self on
# BaseClient, not on ClientCreator.
if not self.can_paginate(operation_name):
raise OperationNotPageableError(operation_name=operation_name)
else:
actual_operation_name = name_mapping[operation_name]
paginator = Paginator(
getattr(self, operation_name),
self._cache['page_config'][actual_operation_name])
return paginator
def can_paginate(self, operation_name):
"""Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you'd normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator("create_foo")``.
:return: ``True`` if the operation can be paginated,
``False`` otherwise.
"""
if 'page_config' not in self._cache:
try:
page_config = loader.load_data('aws/%s/%s.paginators' % (
service_model.endpoint_prefix,
service_model.api_version))['pagination']
self._cache['page_config'] = page_config
except DataNotFoundError:
self._cache['page_config'] = {}
actual_operation_name = name_mapping[operation_name]
return actual_operation_name in self._cache['page_config']
methods['get_paginator'] = get_paginator
methods['can_paginate'] = can_paginate
def _load_service_model(self, service_name):
json_model = self._loader.load_service_model('aws/%s' % service_name)
service_model = ServiceModel(json_model)
return service_model
def _get_client_args(self, service_model, region_name, is_secure,
endpoint_url, verify):
# A client needs:
#
# * serializer
# * endpoint
# * response parser
protocol = service_model.metadata['protocol']
serializer = botocore_eb.serialize.create_serializer(
protocol, include_validation=True)
endpoint = self._endpoint_creator.create_endpoint(
service_model, region_name, is_secure=is_secure,
endpoint_url=endpoint_url, verify=verify)
response_parser = botocore_eb.parsers.create_parser(protocol)
return {
'serializer': serializer,
'endpoint': endpoint,
'response_parser': response_parser
}
def _create_methods(self, service_model):
op_dict = {}
for operation_name in service_model.operation_names:
py_operation_name = xform_name(operation_name)
op_dict[py_operation_name] = self._create_api_method(
py_operation_name, operation_name, service_model)
return op_dict
def _create_name_mapping(self, service_model):
# py_name -> OperationName
mapping = {}
for operation_name in service_model.operation_names:
py_operation_name = xform_name(operation_name)
mapping[py_operation_name] = operation_name
return mapping
def _create_api_method(self, py_operation_name, operation_name,
service_model):
def _api_call(self, **kwargs):
operation_model = service_model.operation_model(operation_name)
request_dict = self._serializer.serialize_to_request(
kwargs, operation_model)
http, parsed_response = self._endpoint.make_request(
operation_model, request_dict)
if http.status_code >= 300:
raise ClientError(parsed_response, operation_name)
else:
return parsed_response
_api_call.__name__ = str(py_operation_name)
# TODO: docstrings.
return _api_call
class BaseClient(object):
def __init__(self, serializer, endpoint, response_parser):
self._serializer = serializer
self._endpoint = endpoint
self._response_parser = response_parser
self._cache = {}
|
normal
|
{
"blob_id": "829c833866198307d7d19c4a0cbe40299ee14eb9",
"index": 5288,
"step-1": "<mask token>\n\n\nclass ClientCreator(object):\n <mask token>\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n <mask token>\n <mask token>\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-2": "<mask token>\n\n\nclass ClientCreator(object):\n <mask token>\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n <mask token>\n <mask token>\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n <mask token>\n <mask token>\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-3": "<mask token>\n\n\nclass ClientError(Exception):\n <mask token>\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][\n 'Code'], error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(getattr(self, operation_name), self.\n _cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' %\n (service_model.endpoint_prefix, service_model.\n api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(kwargs,\n operation_model)\n http, parsed_response = self._endpoint.make_request(operation_model\n , request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n _api_call.__name__ = str(py_operation_name)\n return _api_call\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-4": "<mask token>\n\n\nclass ClientError(Exception):\n MSG_TEMPLATE = (\n 'An error occurred ({error_code}) when calling the {operation_name} operation: {error_message}'\n )\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][\n 'Code'], error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(getattr(self, operation_name), self.\n _cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' %\n (service_model.endpoint_prefix, service_model.\n api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(kwargs,\n operation_model)\n http, parsed_response = self._endpoint.make_request(operation_model\n , request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n _api_call.__name__ = str(py_operation_name)\n return _api_call\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-5": "from botocore_eb.model import ServiceModel\nfrom botocore_eb.exceptions import ParamValidationError\nfrom botocore_eb.exceptions import DataNotFoundError\nfrom botocore_eb.exceptions import OperationNotPageableError\nfrom botocore_eb import xform_name\nfrom botocore_eb.paginate import Paginator\nimport botocore_eb.validate\nimport botocore_eb.serialize\n\n\nclass ClientError(Exception):\n MSG_TEMPLATE = (\n 'An error occurred ({error_code}) when calling the {operation_name} '\n 'operation: {error_message}')\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(\n error_code=error_response['Error']['Code'],\n error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name, is_secure,\n endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n # Note that the 'self' in this method refers to the self on\n # BaseClient, not on ClientCreator.\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(\n getattr(self, operation_name),\n self._cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' % (\n service_model.endpoint_prefix,\n service_model.api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n # A client needs:\n #\n # * serializer\n # * endpoint\n # * response parser\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(\n protocol, include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(\n service_model, region_name, is_secure=is_secure,\n endpoint_url=endpoint_url, verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {\n 'serializer': serializer,\n 'endpoint': endpoint,\n 'response_parser': response_parser\n }\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n # py_name -> OperationName\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(\n kwargs, operation_model)\n\n http, parsed_response = self._endpoint.make_request(\n operation_model, request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n\n _api_call.__name__ = str(py_operation_name)\n # TODO: docstrings.\n return _api_call\n\n\nclass BaseClient(object):\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n",
"step-ids": [
6,
8,
15,
16,
18
]
}
|
[
6,
8,
15,
16,
18
] |
import cv2
import numpy as np
img = cv2.imread('data/j.png', cv2.IMREAD_GRAYSCALE)
kernel = cv2.getStructuringElement(cv2.MORPH_CROSS, (7, 7))
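# Erosion shrinks the foreground; subtracting the eroded image from the original keeps only the contour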
erode = cv2.erode(img, kernel)
contorno = img - erode
cv2.imshow('Original', img)
cv2.imshow('Contorno', contorno)
cv2.waitKey()
cv2.destroyAllWindows()
|
normal
|
{
"blob_id": "809c9ce2b017612bedd1eb889c2b017275ee8b6f",
"index": 1729,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ncv2.imshow('Original', img)\ncv2.imshow('Contorno', contorno)\ncv2.waitKey()\ncv2.destroyAllWindows()\n",
"step-3": "<mask token>\nimg = cv2.imread('data/j.png', cv2.IMREAD_GRAYSCALE)\nkernel = cv2.getStructuringElement(cv2.MORPH_CROSS, (7, 7))\nerode = cv2.erode(img, kernel)\ncontorno = img - erode\ncv2.imshow('Original', img)\ncv2.imshow('Contorno', contorno)\ncv2.waitKey()\ncv2.destroyAllWindows()\n",
"step-4": "import cv2\nimport numpy as np\nimg = cv2.imread('data/j.png', cv2.IMREAD_GRAYSCALE)\nkernel = cv2.getStructuringElement(cv2.MORPH_CROSS, (7, 7))\nerode = cv2.erode(img, kernel)\ncontorno = img - erode\ncv2.imshow('Original', img)\ncv2.imshow('Contorno', contorno)\ncv2.waitKey()\ncv2.destroyAllWindows()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import requests
import datetime
import collections
import csv
import sys
import os
import os.path
History = collections.namedtuple('History', ['open', 'high', 'low', 'close', 'volume', 'adjustment'])
def history(symbol, since, until):
response = requests.get('http://ichart.finance.yahoo.com/table.csv?s=%s&d=%d&e=%d&f=%d&g=d&a=%d&b=%d&c=%d&ignore=.csv' % (
symbol,
until.month - 1,
until.day,
until.year,
since.month - 1,
since.day,
since.year,
))
for row in csv.reader(response.text.split('\n')[::-1][1:-1]):
yield History._make(map(float, row[1:]))
def last(symbol, start, number):
until = start - datetime.timedelta(days=1)
if until.weekday() == 6:
until -= datetime.timedelta(days=2)
elif until.weekday() == 0:
until -= datetime.timedelta(days=1)
since = until - datetime.timedelta(days=number - 1)
if since.weekday() in [0, 6]:
since -= datetime.timedelta(days=2)
return history(symbol, since, until)
def recent(symbol):
response = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=%s&f=d1ohgpvp&e=.csv' % symbol)
return History._make(map(float, csv.reader(response.text.split('\n', 1)).next()[1:]))
def qualify(symbol):
today = datetime.date.today()
data = dict(zip(['yy', 'y'], last(symbol, today, 2)))
try:
data['t'] = recent(symbol)
except ValueError:
return False
return data['yy'].close < data['y'].low and data['y'].close > data['t'].low
def process():
if len(sys.argv) > 1:
symbols = sys.argv[1:]
else:
symbols = []
for entry in os.listdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')):
symbols.append(entry.rsplit('.', 1)[0])
for symbol in symbols:
symbol = symbol.upper()
if symbol.strip() and qualify(symbol):
print symbol
if __name__ == '__main__':
process()
|
normal
|
{
"blob_id": "1cccb37a7195b1555513a32ef33b35b0edcd5eb1",
"index": 5363,
"step-1": "import requests\nimport datetime\nimport collections\nimport csv\nimport sys\nimport os\nimport os.path\n\n\nHistory = collections.namedtuple('History', ['open', 'high', 'low', 'close', 'volume', 'adjustment'])\n\ndef history(symbol, since, until):\n response = requests.get('http://ichart.finance.yahoo.com/table.csv?s=%s&d=%d&e=%d&f=%d&g=d&a=%d&b=%d&c=%d&ignore=.csv' % (\n symbol,\n until.month - 1,\n until.day,\n until.year,\n since.month - 1,\n since.day,\n since.year,\n ))\n for row in csv.reader(response.text.split('\\n')[::-1][1:-1]):\n yield History._make(map(float, row[1:]))\n\ndef last(symbol, start, number):\n until = start - datetime.timedelta(days=1)\n if until.weekday() == 6:\n until -= datetime.timedelta(days=2)\n elif until.weekday() == 0:\n until -= datetime.timedelta(days=1)\n since = until - datetime.timedelta(days=number - 1)\n if since.weekday() in [0, 6]:\n since -= datetime.timedelta(days=2)\n return history(symbol, since, until)\n \ndef recent(symbol):\n response = requests.get('http://download.finance.yahoo.com/d/quotes.csv?s=%s&f=d1ohgpvp&e=.csv' % symbol)\n return History._make(map(float, csv.reader(response.text.split('\\n', 1)).next()[1:]))\n\ndef qualify(symbol):\n today = datetime.date.today()\n data = dict(zip(['yy', 'y'], last(symbol, today, 2)))\n try:\n data['t'] = recent(symbol)\n except ValueError:\n return False\n return data['yy'].close < data['y'].low and data['y'].close > data['t'].low\n\ndef process():\n if len(sys.argv) > 1:\n symbols = sys.argv[1:]\n else:\n symbols = []\n for entry in os.listdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')):\n symbols.append(entry.rsplit('.', 1)[0])\n for symbol in symbols:\n symbol = symbol.upper()\n if symbol.strip() and qualify(symbol):\n print symbol\n\nif __name__ == '__main__':\n process()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
from population import Population
class REvolution:
def __init__(self, original_ind, combine_params, mutate_params, fitness, pop_params, method):
self.population = Population(1, fitness, pop_params)
self.combine_params = combine_params
self.mutate_params = mutate_params
self.fitness = fitness
self.method = method
self.result = []
self.original_ind = original_ind
def run_random(self, epochs):
for ep in range(epochs):
pop_ind = self.population.individuals[0]
offspring = pop_ind.compare(self.original_ind, self.combine_params, self.fitness)
offspring.mutate_random(self.mutate_params)
self.population.arrange_population([offspring])
print("Epoch {}: {}".format(ep, self.get_pop()))
self.result.append(self.fitness(self.population.individuals[-1].value))
def run_1_1(self, epochs):
for ep in range(epochs):
pop_ind = self.population.individuals[0]
offspring = pop_ind.compare(self.original_ind, self.combine_params, self.fitness)
offspring.mutate(self.mutate_params)
self.population.arrange_population([offspring])
print("Epoch {}: {}".format(ep, self.get_pop()))
self.result.append(self.fitness(self.population.individuals[-1].value))
def get_pop(self):
ids = ["x: {} => y: {}".format("%.3f" % i.value[0], "%.3f" % self.fitness(i.value))
for i in self.population.individuals]
return ids
|
normal
|
{
"blob_id": "fe13b57484e0f0796164fda99c0d759238a67153",
"index": 7215,
"step-1": "<mask token>\n\n\nclass REvolution:\n <mask token>\n <mask token>\n <mask token>\n\n def get_pop(self):\n ids = ['x: {} => y: {}'.format('%.3f' % i.value[0], '%.3f' % self.\n fitness(i.value)) for i in self.population.individuals]\n return ids\n",
"step-2": "<mask token>\n\n\nclass REvolution:\n\n def __init__(self, original_ind, combine_params, mutate_params, fitness,\n pop_params, method):\n self.population = Population(1, fitness, pop_params)\n self.combine_params = combine_params\n self.mutate_params = mutate_params\n self.fitness = fitness\n self.method = method\n self.result = []\n self.original_ind = original_ind\n <mask token>\n\n def run_1_1(self, epochs):\n for ep in range(epochs):\n pop_ind = self.population.individuals[0]\n offspring = pop_ind.compare(self.original_ind, self.\n combine_params, self.fitness)\n offspring.mutate(self.mutate_params)\n self.population.arrange_population([offspring])\n print('Epoch {}: {}'.format(ep, self.get_pop()))\n self.result.append(self.fitness(self.population.individuals[-1]\n .value))\n\n def get_pop(self):\n ids = ['x: {} => y: {}'.format('%.3f' % i.value[0], '%.3f' % self.\n fitness(i.value)) for i in self.population.individuals]\n return ids\n",
"step-3": "<mask token>\n\n\nclass REvolution:\n\n def __init__(self, original_ind, combine_params, mutate_params, fitness,\n pop_params, method):\n self.population = Population(1, fitness, pop_params)\n self.combine_params = combine_params\n self.mutate_params = mutate_params\n self.fitness = fitness\n self.method = method\n self.result = []\n self.original_ind = original_ind\n\n def run_random(self, epochs):\n for ep in range(epochs):\n pop_ind = self.population.individuals[0]\n offspring = pop_ind.compare(self.original_ind, self.\n combine_params, self.fitness)\n offspring.mutate_random(self.mutate_params)\n self.population.arrange_population([offspring])\n print('Epoch {}: {}'.format(ep, self.get_pop()))\n self.result.append(self.fitness(self.population.individuals[-1]\n .value))\n\n def run_1_1(self, epochs):\n for ep in range(epochs):\n pop_ind = self.population.individuals[0]\n offspring = pop_ind.compare(self.original_ind, self.\n combine_params, self.fitness)\n offspring.mutate(self.mutate_params)\n self.population.arrange_population([offspring])\n print('Epoch {}: {}'.format(ep, self.get_pop()))\n self.result.append(self.fitness(self.population.individuals[-1]\n .value))\n\n def get_pop(self):\n ids = ['x: {} => y: {}'.format('%.3f' % i.value[0], '%.3f' % self.\n fitness(i.value)) for i in self.population.individuals]\n return ids\n",
"step-4": "from population import Population\n\n\nclass REvolution:\n\n def __init__(self, original_ind, combine_params, mutate_params, fitness,\n pop_params, method):\n self.population = Population(1, fitness, pop_params)\n self.combine_params = combine_params\n self.mutate_params = mutate_params\n self.fitness = fitness\n self.method = method\n self.result = []\n self.original_ind = original_ind\n\n def run_random(self, epochs):\n for ep in range(epochs):\n pop_ind = self.population.individuals[0]\n offspring = pop_ind.compare(self.original_ind, self.\n combine_params, self.fitness)\n offspring.mutate_random(self.mutate_params)\n self.population.arrange_population([offspring])\n print('Epoch {}: {}'.format(ep, self.get_pop()))\n self.result.append(self.fitness(self.population.individuals[-1]\n .value))\n\n def run_1_1(self, epochs):\n for ep in range(epochs):\n pop_ind = self.population.individuals[0]\n offspring = pop_ind.compare(self.original_ind, self.\n combine_params, self.fitness)\n offspring.mutate(self.mutate_params)\n self.population.arrange_population([offspring])\n print('Epoch {}: {}'.format(ep, self.get_pop()))\n self.result.append(self.fitness(self.population.individuals[-1]\n .value))\n\n def get_pop(self):\n ids = ['x: {} => y: {}'.format('%.3f' % i.value[0], '%.3f' % self.\n fitness(i.value)) for i in self.population.individuals]\n return ids\n",
"step-5": "from population import Population\n\n\nclass REvolution:\n def __init__(self, original_ind, combine_params, mutate_params, fitness, pop_params, method):\n self.population = Population(1, fitness, pop_params)\n self.combine_params = combine_params\n self.mutate_params = mutate_params\n self.fitness = fitness\n self.method = method\n self.result = []\n self.original_ind = original_ind\n\n def run_random(self, epochs):\n for ep in range(epochs):\n pop_ind = self.population.individuals[0]\n offspring = pop_ind.compare(self.original_ind, self.combine_params, self.fitness)\n offspring.mutate_random(self.mutate_params)\n self.population.arrange_population([offspring])\n print(\"Epoch {}: {}\".format(ep, self.get_pop()))\n self.result.append(self.fitness(self.population.individuals[-1].value))\n\n def run_1_1(self, epochs):\n for ep in range(epochs):\n pop_ind = self.population.individuals[0]\n offspring = pop_ind.compare(self.original_ind, self.combine_params, self.fitness)\n offspring.mutate(self.mutate_params)\n self.population.arrange_population([offspring])\n print(\"Epoch {}: {}\".format(ep, self.get_pop()))\n self.result.append(self.fitness(self.population.individuals[-1].value))\n\n def get_pop(self):\n ids = [\"x: {} => y: {}\".format(\"%.3f\" % i.value[0], \"%.3f\" % self.fitness(i.value))\n for i in self.population.individuals]\n return ids\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
#!/usr/bin/env python
#
# This will take a snapshot and convert it into a volume. To create a volume
# without any links to the old snapshot you need to convert it to a temporary
# volume first, convert that into an image and convert the image back into
# your final volume. Once this is all done, the temporary volume and image
# will be removed.
#
import argparse
import openstack
import time
import sys
from sdk import Snapshot
def main(args):
# Set up the connection to OpenStack -- this is read from clouds.yaml
openstack.enable_logging(debug=False)
api = openstack.connect(cloud=args.cloud)
snapshot_id = args.snapshot
server = args.volume
# Create a snapshot object
try:
snapshot = Snapshot(
api=api,
snapshot=api.volume.get_snapshot(snapshot_id),
)
except openstack.exceptions.ResourceNotFound:
print('Snapshot id {} not found.'.format(snapshot_id))
sys.exit(1)
today = time.strftime("%d-%m-%Y")
# Convert the snapshot to a volume
print('')
print('Converting snapshot to volume..')
volume = snapshot.to_volume('{}-restore-{}'.format(server, today))
print('Converting volume to image..')
image = volume.to_image('{}-restore-{}'.format(server, today))
print('Converting image to volume..')
image.to_volume(server, size=volume.volume.size)
image.delete()
volume.delete()
print('')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Restore snapshots')
parser.add_argument(
'--snapshot',
required=True,
help='',
metavar=('<snapshot_id>'),
)
parser.add_argument(
'--volume',
required=True,
help='',
metavar=('<volume name>'),
)
parser.add_argument(
'--cloud',
help='',
metavar=('<cloud in clouds.yaml>'),
default='fuga',
)
args = parser.parse_args()
main(args)
|
normal
|
{
"blob_id": "aebe749a20482636d7ed508f9cbd9cde56656b73",
"index": 6236,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main(args):\n openstack.enable_logging(debug=False)\n api = openstack.connect(cloud=args.cloud)\n snapshot_id = args.snapshot\n server = args.volume\n try:\n snapshot = Snapshot(api=api, snapshot=api.volume.get_snapshot(\n snapshot_id))\n except openstack.exceptions.ResourceNotFound:\n print('Snapshot id {} not found.'.format(snapshot_id))\n sys.exit(1)\n today = time.strftime('%d-%m-%Y')\n print('')\n print('Converting snapshot to volume..')\n volume = snapshot.to_volume('{}-restore-{}'.format(server, today))\n print('Converting volume to image..')\n image = volume.to_image('{}-restore-{}'.format(server, today))\n print('Converting image to volume..')\n image.to_volume(server, size=volume.volume.size)\n image.delete()\n volume.delete()\n print('')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main(args):\n openstack.enable_logging(debug=False)\n api = openstack.connect(cloud=args.cloud)\n snapshot_id = args.snapshot\n server = args.volume\n try:\n snapshot = Snapshot(api=api, snapshot=api.volume.get_snapshot(\n snapshot_id))\n except openstack.exceptions.ResourceNotFound:\n print('Snapshot id {} not found.'.format(snapshot_id))\n sys.exit(1)\n today = time.strftime('%d-%m-%Y')\n print('')\n print('Converting snapshot to volume..')\n volume = snapshot.to_volume('{}-restore-{}'.format(server, today))\n print('Converting volume to image..')\n image = volume.to_image('{}-restore-{}'.format(server, today))\n print('Converting image to volume..')\n image.to_volume(server, size=volume.volume.size)\n image.delete()\n volume.delete()\n print('')\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Restore snapshots')\n parser.add_argument('--snapshot', required=True, help='', metavar=\n '<snapshot_id>')\n parser.add_argument('--volume', required=True, help='', metavar=\n '<volume name>')\n parser.add_argument('--cloud', help='', metavar=\n '<cloud in clouds.yaml>', default='fuga')\n args = parser.parse_args()\n main(args)\n",
"step-4": "import argparse\nimport openstack\nimport time\nimport sys\nfrom sdk import Snapshot\n\n\ndef main(args):\n openstack.enable_logging(debug=False)\n api = openstack.connect(cloud=args.cloud)\n snapshot_id = args.snapshot\n server = args.volume\n try:\n snapshot = Snapshot(api=api, snapshot=api.volume.get_snapshot(\n snapshot_id))\n except openstack.exceptions.ResourceNotFound:\n print('Snapshot id {} not found.'.format(snapshot_id))\n sys.exit(1)\n today = time.strftime('%d-%m-%Y')\n print('')\n print('Converting snapshot to volume..')\n volume = snapshot.to_volume('{}-restore-{}'.format(server, today))\n print('Converting volume to image..')\n image = volume.to_image('{}-restore-{}'.format(server, today))\n print('Converting image to volume..')\n image.to_volume(server, size=volume.volume.size)\n image.delete()\n volume.delete()\n print('')\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Restore snapshots')\n parser.add_argument('--snapshot', required=True, help='', metavar=\n '<snapshot_id>')\n parser.add_argument('--volume', required=True, help='', metavar=\n '<volume name>')\n parser.add_argument('--cloud', help='', metavar=\n '<cloud in clouds.yaml>', default='fuga')\n args = parser.parse_args()\n main(args)\n",
"step-5": "#!/usr/bin/env python\n#\n# This will take a snapshot and convert it into a volume. To create a volume\n# without any links to the old snapshot you need to convert it to a temporary\n# volume first, convert that into an image and convert the image back into\n# your final volume. Once this is all done, the temporary volume and image\n# will be removed.\n#\n\nimport argparse\nimport openstack\nimport time\nimport sys\nfrom sdk import Snapshot\n\n\ndef main(args):\n # Set up the connection to OpenStack -- this is read from clouds.yaml\n openstack.enable_logging(debug=False)\n api = openstack.connect(cloud=args.cloud)\n\n snapshot_id = args.snapshot\n server = args.volume\n\n # Create a snapshot object\n try:\n snapshot = Snapshot(\n api=api,\n snapshot=api.volume.get_snapshot(snapshot_id),\n )\n except openstack.exceptions.ResourceNotFound:\n print('Snapshot id {} not found.'.format(snapshot_id))\n sys.exit(1)\n\n today = time.strftime(\"%d-%m-%Y\")\n\n # Convert the snapshot to a volume\n print('')\n\n print('Converting snapshot to volume..')\n volume = snapshot.to_volume('{}-restore-{}'.format(server, today))\n\n print('Converting volume to image..')\n image = volume.to_image('{}-restore-{}'.format(server, today))\n\n print('Converting image to volume..')\n image.to_volume(server, size=volume.volume.size)\n\n image.delete()\n volume.delete()\n print('')\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='Restore snapshots')\n parser.add_argument(\n '--snapshot',\n required=True,\n help='',\n metavar=('<snapshot_id>'),\n )\n parser.add_argument(\n '--volume',\n required=True,\n help='',\n metavar=('<volume name>'),\n )\n parser.add_argument(\n '--cloud',\n help='',\n metavar=('<cloud in clouds.yaml>'),\n default='fuga',\n )\n\n args = parser.parse_args()\n main(args)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def Merge (left,right,merged):
    # Function for merging and comparing the elements of the arrays
left_cursor,right_cursor=0,0
while left_cursor<len(left) and right_cursor<len(right):
if left[left_cursor]<=right[right_cursor]:
merged[left_cursor+right_cursor]=left[left_cursor]
left_cursor+=1
else:
merged[left_cursor+right_cursor]=right[right_cursor]
right_cursor+=1
for left_cursor in range(left_cursor,len(left)):
merged[left_cursor+right_cursor]=left[left_cursor]
for right_cursor in range(right_cursor,len(right)):
merged[left_cursor+right_cursor]=right[right_cursor]
return merged
def MergeSort(array):
    # Main recursive function
if len(array)<=1:
return array
mid=len(array)//2
left,right=MergeSort(array[:mid]),MergeSort(array[mid:])
return Merge(left,right,array.copy())
"""
a=[2,45,1,4,66,34]
print(MergeSort(a))
print(a)
"""
|
normal
|
{
"blob_id": "c64c542b57107c06de2ce0751075a81fcb195b61",
"index": 4293,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef MergeSort(array):\n if len(array) <= 1:\n return array\n mid = len(array) // 2\n left, right = MergeSort(array[:mid]), MergeSort(array[mid:])\n return Merge(left, right, array.copy())\n\n\n<mask token>\n",
"step-3": "def Merge(left, right, merged):\n left_cursor, right_cursor = 0, 0\n while left_cursor < len(left) and right_cursor < len(right):\n if left[left_cursor] <= right[right_cursor]:\n merged[left_cursor + right_cursor] = left[left_cursor]\n left_cursor += 1\n else:\n merged[left_cursor + right_cursor] = right[right_cursor]\n right_cursor += 1\n for left_cursor in range(left_cursor, len(left)):\n merged[left_cursor + right_cursor] = left[left_cursor]\n for right_cursor in range(right_cursor, len(right)):\n merged[left_cursor + right_cursor] = right[right_cursor]\n return merged\n\n\ndef MergeSort(array):\n if len(array) <= 1:\n return array\n mid = len(array) // 2\n left, right = MergeSort(array[:mid]), MergeSort(array[mid:])\n return Merge(left, right, array.copy())\n\n\n<mask token>\n",
"step-4": "def Merge (left,right,merged):\n #Ф-ция объединения и сравнения элементов массивов \n left_cursor,right_cursor=0,0\n while left_cursor<len(left) and right_cursor<len(right):\n if left[left_cursor]<=right[right_cursor]:\n merged[left_cursor+right_cursor]=left[left_cursor]\n left_cursor+=1\n else:\n merged[left_cursor+right_cursor]=right[right_cursor]\n right_cursor+=1\n for left_cursor in range(left_cursor,len(left)):\n merged[left_cursor+right_cursor]=left[left_cursor]\n for right_cursor in range(right_cursor,len(right)):\n merged[left_cursor+right_cursor]=right[right_cursor]\n return merged\n\ndef MergeSort(array):\n #Основная рекурсивная функция\n if len(array)<=1:\n return array\n mid=len(array)//2\n left,right=MergeSort(array[:mid]),MergeSort(array[mid:])\n return Merge(left,right,array.copy())\n\n\n\"\"\"\na=[2,45,1,4,66,34]\nprint(MergeSort(a))\nprint(a) \n\"\"\"\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# coding: utf-8
"""
Adobe Experience Manager OSGI config (AEM) API
Swagger AEM OSGI is an OpenAPI specification for Adobe Experience Manager (AEM) OSGI Configurations API # noqa: E501
OpenAPI spec version: 1.0.0-pre.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'token_expiration': 'ConfigNodePropertyString',
'token_length': 'ConfigNodePropertyString',
'token_refresh': 'ConfigNodePropertyBoolean',
'token_cleanup_threshold': 'ConfigNodePropertyInteger',
'password_hash_algorithm': 'ConfigNodePropertyString',
'password_hash_iterations': 'ConfigNodePropertyInteger',
'password_salt_size': 'ConfigNodePropertyInteger'
}
attribute_map = {
'token_expiration': 'tokenExpiration',
'token_length': 'tokenLength',
'token_refresh': 'tokenRefresh',
'token_cleanup_threshold': 'tokenCleanupThreshold',
'password_hash_algorithm': 'passwordHashAlgorithm',
'password_hash_iterations': 'passwordHashIterations',
'password_salt_size': 'passwordSaltSize'
}
def __init__(self, token_expiration=None, token_length=None, token_refresh=None, token_cleanup_threshold=None, password_hash_algorithm=None, password_hash_iterations=None, password_salt_size=None): # noqa: E501
"""OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties - a model defined in OpenAPI""" # noqa: E501
self._token_expiration = None
self._token_length = None
self._token_refresh = None
self._token_cleanup_threshold = None
self._password_hash_algorithm = None
self._password_hash_iterations = None
self._password_salt_size = None
self.discriminator = None
if token_expiration is not None:
self.token_expiration = token_expiration
if token_length is not None:
self.token_length = token_length
if token_refresh is not None:
self.token_refresh = token_refresh
if token_cleanup_threshold is not None:
self.token_cleanup_threshold = token_cleanup_threshold
if password_hash_algorithm is not None:
self.password_hash_algorithm = password_hash_algorithm
if password_hash_iterations is not None:
self.password_hash_iterations = password_hash_iterations
if password_salt_size is not None:
self.password_salt_size = password_salt_size
@property
def token_expiration(self):
"""Gets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:return: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:rtype: ConfigNodePropertyString
"""
return self._token_expiration
@token_expiration.setter
def token_expiration(self, token_expiration):
"""Sets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.
:param token_expiration: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:type: ConfigNodePropertyString
"""
self._token_expiration = token_expiration
@property
def token_length(self):
"""Gets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:return: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:rtype: ConfigNodePropertyString
"""
return self._token_length
@token_length.setter
def token_length(self, token_length):
"""Sets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.
:param token_length: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:type: ConfigNodePropertyString
"""
self._token_length = token_length
@property
def token_refresh(self):
"""Gets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:return: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:rtype: ConfigNodePropertyBoolean
"""
return self._token_refresh
@token_refresh.setter
def token_refresh(self, token_refresh):
"""Sets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.
:param token_refresh: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:type: ConfigNodePropertyBoolean
"""
self._token_refresh = token_refresh
@property
def token_cleanup_threshold(self):
"""Gets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:return: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:rtype: ConfigNodePropertyInteger
"""
return self._token_cleanup_threshold
@token_cleanup_threshold.setter
def token_cleanup_threshold(self, token_cleanup_threshold):
"""Sets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.
:param token_cleanup_threshold: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:type: ConfigNodePropertyInteger
"""
self._token_cleanup_threshold = token_cleanup_threshold
@property
def password_hash_algorithm(self):
"""Gets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:return: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:rtype: ConfigNodePropertyString
"""
return self._password_hash_algorithm
@password_hash_algorithm.setter
def password_hash_algorithm(self, password_hash_algorithm):
"""Sets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.
:param password_hash_algorithm: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:type: ConfigNodePropertyString
"""
self._password_hash_algorithm = password_hash_algorithm
@property
def password_hash_iterations(self):
"""Gets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:return: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:rtype: ConfigNodePropertyInteger
"""
return self._password_hash_iterations
@password_hash_iterations.setter
def password_hash_iterations(self, password_hash_iterations):
"""Sets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.
:param password_hash_iterations: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:type: ConfigNodePropertyInteger
"""
self._password_hash_iterations = password_hash_iterations
@property
def password_salt_size(self):
"""Gets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:return: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:rtype: ConfigNodePropertyInteger
"""
return self._password_salt_size
@password_salt_size.setter
def password_salt_size(self, password_salt_size):
"""Sets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.
:param password_salt_size: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501
:type: ConfigNodePropertyInteger
"""
self._password_salt_size = password_salt_size
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
normal
|
{
"blob_id": "0ddac0aac5bd001504ed37d31b74c6442304e350",
"index": 5729,
"step-1": "<mask token>\n\n\nclass OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties(\n object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @property\n def token_length(self):\n \"\"\"Gets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_length\n\n @token_length.setter\n def token_length(self, token_length):\n \"\"\"Sets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_length: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._token_length = token_length\n\n @property\n def token_refresh(self):\n \"\"\"Gets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyBoolean\n \"\"\"\n return self._token_refresh\n <mask token>\n <mask token>\n\n @token_cleanup_threshold.setter\n def token_cleanup_threshold(self, token_cleanup_threshold):\n \"\"\"Sets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_cleanup_threshold: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._token_cleanup_threshold = token_cleanup_threshold\n <mask token>\n\n @password_hash_algorithm.setter\n def password_hash_algorithm(self, password_hash_algorithm):\n \"\"\"Sets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_algorithm: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._password_hash_algorithm = password_hash_algorithm\n\n @property\n def password_hash_iterations(self):\n \"\"\"Gets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_hash_iterations\n\n @password_hash_iterations.setter\n def password_hash_iterations(self, password_hash_iterations):\n \"\"\"Sets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_iterations: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._password_hash_iterations = password_hash_iterations\n <mask token>\n <mask token>\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.openapi_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n <mask token>\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-2": "<mask token>\n\n\nclass OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties(\n object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, token_expiration=None, token_length=None,\n token_refresh=None, token_cleanup_threshold=None,\n password_hash_algorithm=None, password_hash_iterations=None,\n password_salt_size=None):\n \"\"\"OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties - a model defined in OpenAPI\"\"\"\n self._token_expiration = None\n self._token_length = None\n self._token_refresh = None\n self._token_cleanup_threshold = None\n self._password_hash_algorithm = None\n self._password_hash_iterations = None\n self._password_salt_size = None\n self.discriminator = None\n if token_expiration is not None:\n self.token_expiration = token_expiration\n if token_length is not None:\n self.token_length = token_length\n if token_refresh is not None:\n self.token_refresh = token_refresh\n if token_cleanup_threshold is not None:\n self.token_cleanup_threshold = token_cleanup_threshold\n if password_hash_algorithm is not None:\n self.password_hash_algorithm = password_hash_algorithm\n if password_hash_iterations is not None:\n self.password_hash_iterations = password_hash_iterations\n if password_salt_size is not None:\n self.password_salt_size = password_salt_size\n\n @property\n def token_expiration(self):\n \"\"\"Gets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_expiration\n <mask token>\n\n @property\n def token_length(self):\n \"\"\"Gets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_length\n\n @token_length.setter\n def token_length(self, token_length):\n \"\"\"Sets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_length: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._token_length = token_length\n\n @property\n def token_refresh(self):\n \"\"\"Gets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyBoolean\n \"\"\"\n return self._token_refresh\n\n @token_refresh.setter\n def token_refresh(self, token_refresh):\n \"\"\"Sets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_refresh: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyBoolean\n \"\"\"\n self._token_refresh = token_refresh\n\n @property\n def token_cleanup_threshold(self):\n \"\"\"Gets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n\n\n :return: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._token_cleanup_threshold\n\n @token_cleanup_threshold.setter\n def token_cleanup_threshold(self, token_cleanup_threshold):\n \"\"\"Sets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_cleanup_threshold: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._token_cleanup_threshold = token_cleanup_threshold\n\n @property\n def password_hash_algorithm(self):\n \"\"\"Gets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._password_hash_algorithm\n\n @password_hash_algorithm.setter\n def password_hash_algorithm(self, password_hash_algorithm):\n \"\"\"Sets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_algorithm: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._password_hash_algorithm = password_hash_algorithm\n\n @property\n def password_hash_iterations(self):\n \"\"\"Gets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_hash_iterations\n\n @password_hash_iterations.setter\n def password_hash_iterations(self, password_hash_iterations):\n \"\"\"Sets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_iterations: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._password_hash_iterations = password_hash_iterations\n <mask token>\n\n @password_salt_size.setter\n def password_salt_size(self, password_salt_size):\n \"\"\"Sets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_salt_size: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._password_salt_size = password_salt_size\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.openapi_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n <mask token>\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-3": "<mask token>\n\n\nclass OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties(\n object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, token_expiration=None, token_length=None,\n token_refresh=None, token_cleanup_threshold=None,\n password_hash_algorithm=None, password_hash_iterations=None,\n password_salt_size=None):\n \"\"\"OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties - a model defined in OpenAPI\"\"\"\n self._token_expiration = None\n self._token_length = None\n self._token_refresh = None\n self._token_cleanup_threshold = None\n self._password_hash_algorithm = None\n self._password_hash_iterations = None\n self._password_salt_size = None\n self.discriminator = None\n if token_expiration is not None:\n self.token_expiration = token_expiration\n if token_length is not None:\n self.token_length = token_length\n if token_refresh is not None:\n self.token_refresh = token_refresh\n if token_cleanup_threshold is not None:\n self.token_cleanup_threshold = token_cleanup_threshold\n if password_hash_algorithm is not None:\n self.password_hash_algorithm = password_hash_algorithm\n if password_hash_iterations is not None:\n self.password_hash_iterations = password_hash_iterations\n if password_salt_size is not None:\n self.password_salt_size = password_salt_size\n\n @property\n def token_expiration(self):\n \"\"\"Gets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_expiration\n <mask token>\n\n @property\n def token_length(self):\n \"\"\"Gets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_length\n\n @token_length.setter\n def token_length(self, token_length):\n \"\"\"Sets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_length: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._token_length = token_length\n\n @property\n def token_refresh(self):\n \"\"\"Gets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyBoolean\n \"\"\"\n return self._token_refresh\n\n @token_refresh.setter\n def token_refresh(self, token_refresh):\n \"\"\"Sets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_refresh: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyBoolean\n \"\"\"\n self._token_refresh = token_refresh\n\n @property\n def token_cleanup_threshold(self):\n \"\"\"Gets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n\n\n :return: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._token_cleanup_threshold\n\n @token_cleanup_threshold.setter\n def token_cleanup_threshold(self, token_cleanup_threshold):\n \"\"\"Sets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_cleanup_threshold: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._token_cleanup_threshold = token_cleanup_threshold\n\n @property\n def password_hash_algorithm(self):\n \"\"\"Gets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._password_hash_algorithm\n\n @password_hash_algorithm.setter\n def password_hash_algorithm(self, password_hash_algorithm):\n \"\"\"Sets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_algorithm: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._password_hash_algorithm = password_hash_algorithm\n\n @property\n def password_hash_iterations(self):\n \"\"\"Gets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_hash_iterations\n\n @password_hash_iterations.setter\n def password_hash_iterations(self, password_hash_iterations):\n \"\"\"Sets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_iterations: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._password_hash_iterations = password_hash_iterations\n\n @property\n def password_salt_size(self):\n \"\"\"Gets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_salt_size\n\n @password_salt_size.setter\n def password_salt_size(self, password_salt_size):\n \"\"\"Sets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_salt_size: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._password_salt_size = password_salt_size\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.openapi_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n <mask token>\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-4": "<mask token>\n\n\nclass OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties(\n object):\n <mask token>\n <mask token>\n openapi_types = {'token_expiration': 'ConfigNodePropertyString',\n 'token_length': 'ConfigNodePropertyString', 'token_refresh':\n 'ConfigNodePropertyBoolean', 'token_cleanup_threshold':\n 'ConfigNodePropertyInteger', 'password_hash_algorithm':\n 'ConfigNodePropertyString', 'password_hash_iterations':\n 'ConfigNodePropertyInteger', 'password_salt_size':\n 'ConfigNodePropertyInteger'}\n attribute_map = {'token_expiration': 'tokenExpiration', 'token_length':\n 'tokenLength', 'token_refresh': 'tokenRefresh',\n 'token_cleanup_threshold': 'tokenCleanupThreshold',\n 'password_hash_algorithm': 'passwordHashAlgorithm',\n 'password_hash_iterations': 'passwordHashIterations',\n 'password_salt_size': 'passwordSaltSize'}\n\n def __init__(self, token_expiration=None, token_length=None,\n token_refresh=None, token_cleanup_threshold=None,\n password_hash_algorithm=None, password_hash_iterations=None,\n password_salt_size=None):\n \"\"\"OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties - a model defined in OpenAPI\"\"\"\n self._token_expiration = None\n self._token_length = None\n self._token_refresh = None\n self._token_cleanup_threshold = None\n self._password_hash_algorithm = None\n self._password_hash_iterations = None\n self._password_salt_size = None\n self.discriminator = None\n if token_expiration is not None:\n self.token_expiration = token_expiration\n if token_length is not None:\n self.token_length = token_length\n if token_refresh is not None:\n self.token_refresh = token_refresh\n if token_cleanup_threshold is not None:\n self.token_cleanup_threshold = token_cleanup_threshold\n if password_hash_algorithm is not None:\n self.password_hash_algorithm = password_hash_algorithm\n if password_hash_iterations is not None:\n self.password_hash_iterations = password_hash_iterations\n if password_salt_size is not None:\n self.password_salt_size = password_salt_size\n\n @property\n def token_expiration(self):\n \"\"\"Gets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_expiration\n\n @token_expiration.setter\n def token_expiration(self, token_expiration):\n \"\"\"Sets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_expiration: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._token_expiration = token_expiration\n\n @property\n def token_length(self):\n \"\"\"Gets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_length\n\n @token_length.setter\n def token_length(self, token_length):\n \"\"\"Sets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_length: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._token_length = token_length\n\n @property\n def token_refresh(self):\n \"\"\"Gets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyBoolean\n \"\"\"\n return self._token_refresh\n\n @token_refresh.setter\n def token_refresh(self, token_refresh):\n \"\"\"Sets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_refresh: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyBoolean\n \"\"\"\n self._token_refresh = token_refresh\n\n @property\n def token_cleanup_threshold(self):\n \"\"\"Gets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._token_cleanup_threshold\n\n @token_cleanup_threshold.setter\n def token_cleanup_threshold(self, token_cleanup_threshold):\n \"\"\"Sets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_cleanup_threshold: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._token_cleanup_threshold = token_cleanup_threshold\n\n @property\n def password_hash_algorithm(self):\n \"\"\"Gets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._password_hash_algorithm\n\n @password_hash_algorithm.setter\n def password_hash_algorithm(self, password_hash_algorithm):\n \"\"\"Sets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_algorithm: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n self._password_hash_algorithm = password_hash_algorithm\n\n @property\n def password_hash_iterations(self):\n \"\"\"Gets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_hash_iterations\n\n @password_hash_iterations.setter\n def password_hash_iterations(self, password_hash_iterations):\n \"\"\"Sets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_iterations: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._password_hash_iterations = password_hash_iterations\n\n @property\n def password_salt_size(self):\n \"\"\"Gets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_salt_size\n\n @password_salt_size.setter\n def password_salt_size(self, password_salt_size):\n \"\"\"Sets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_salt_size: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n self._password_salt_size = password_salt_size\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n for attr, _ in six.iteritems(self.openapi_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(lambda x: x.to_dict() if hasattr(x,\n 'to_dict') else x, value))\n elif hasattr(value, 'to_dict'):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(lambda item: (item[0], item[1].\n to_dict()) if hasattr(item[1], 'to_dict') else item,\n value.items()))\n else:\n result[attr] = value\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other,\n OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties\n ):\n return False\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-5": "# coding: utf-8\n\n\"\"\"\n Adobe Experience Manager OSGI config (AEM) API\n\n Swagger AEM OSGI is an OpenAPI specification for Adobe Experience Manager (AEM) OSGI Configurations API # noqa: E501\n\n OpenAPI spec version: 1.0.0-pre.0\n Contact: [email protected]\n Generated by: https://openapi-generator.tech\n\"\"\"\n\n\nimport pprint\nimport re # noqa: F401\n\nimport six\n\n\nclass OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties(object):\n \"\"\"NOTE: This class is auto generated by OpenAPI Generator.\n Ref: https://openapi-generator.tech\n\n Do not edit the class manually.\n \"\"\"\n\n \"\"\"\n Attributes:\n openapi_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n openapi_types = {\n 'token_expiration': 'ConfigNodePropertyString',\n 'token_length': 'ConfigNodePropertyString',\n 'token_refresh': 'ConfigNodePropertyBoolean',\n 'token_cleanup_threshold': 'ConfigNodePropertyInteger',\n 'password_hash_algorithm': 'ConfigNodePropertyString',\n 'password_hash_iterations': 'ConfigNodePropertyInteger',\n 'password_salt_size': 'ConfigNodePropertyInteger'\n }\n\n attribute_map = {\n 'token_expiration': 'tokenExpiration',\n 'token_length': 'tokenLength',\n 'token_refresh': 'tokenRefresh',\n 'token_cleanup_threshold': 'tokenCleanupThreshold',\n 'password_hash_algorithm': 'passwordHashAlgorithm',\n 'password_hash_iterations': 'passwordHashIterations',\n 'password_salt_size': 'passwordSaltSize'\n }\n\n def __init__(self, token_expiration=None, token_length=None, token_refresh=None, token_cleanup_threshold=None, password_hash_algorithm=None, password_hash_iterations=None, password_salt_size=None): # noqa: E501\n \"\"\"OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties - a model defined in OpenAPI\"\"\" # noqa: E501\n\n self._token_expiration = None\n self._token_length = None\n self._token_refresh = None\n self._token_cleanup_threshold = None\n self._password_hash_algorithm = None\n self._password_hash_iterations = None\n self._password_salt_size = None\n self.discriminator = None\n\n if token_expiration is not None:\n self.token_expiration = token_expiration\n if token_length is not None:\n self.token_length = token_length\n if token_refresh is not None:\n self.token_refresh = token_refresh\n if token_cleanup_threshold is not None:\n self.token_cleanup_threshold = token_cleanup_threshold\n if password_hash_algorithm is not None:\n self.password_hash_algorithm = password_hash_algorithm\n if password_hash_iterations is not None:\n self.password_hash_iterations = password_hash_iterations\n if password_salt_size is not None:\n self.password_salt_size = password_salt_size\n\n @property\n def token_expiration(self):\n \"\"\"Gets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_expiration\n\n @token_expiration.setter\n def token_expiration(self, token_expiration):\n \"\"\"Sets the token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_expiration: The token_expiration of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n\n self._token_expiration = token_expiration\n\n @property\n def token_length(self):\n \"\"\"Gets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._token_length\n\n @token_length.setter\n def token_length(self, token_length):\n \"\"\"Sets the token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_length: The token_length of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n\n self._token_length = token_length\n\n @property\n def token_refresh(self):\n \"\"\"Gets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyBoolean\n \"\"\"\n return self._token_refresh\n\n @token_refresh.setter\n def token_refresh(self, token_refresh):\n \"\"\"Sets the token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_refresh: The token_refresh of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyBoolean\n \"\"\"\n\n self._token_refresh = token_refresh\n\n @property\n def token_cleanup_threshold(self):\n \"\"\"Gets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._token_cleanup_threshold\n\n @token_cleanup_threshold.setter\n def token_cleanup_threshold(self, token_cleanup_threshold):\n \"\"\"Sets the token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param token_cleanup_threshold: The token_cleanup_threshold of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n\n self._token_cleanup_threshold = token_cleanup_threshold\n\n @property\n def password_hash_algorithm(self):\n \"\"\"Gets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyString\n \"\"\"\n return self._password_hash_algorithm\n\n @password_hash_algorithm.setter\n def password_hash_algorithm(self, password_hash_algorithm):\n \"\"\"Sets the password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_algorithm: The password_hash_algorithm of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. 
# noqa: E501\n :type: ConfigNodePropertyString\n \"\"\"\n\n self._password_hash_algorithm = password_hash_algorithm\n\n @property\n def password_hash_iterations(self):\n \"\"\"Gets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_hash_iterations\n\n @password_hash_iterations.setter\n def password_hash_iterations(self, password_hash_iterations):\n \"\"\"Sets the password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_hash_iterations: The password_hash_iterations of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n\n self._password_hash_iterations = password_hash_iterations\n\n @property\n def password_salt_size(self):\n \"\"\"Gets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n\n\n :return: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :rtype: ConfigNodePropertyInteger\n \"\"\"\n return self._password_salt_size\n\n @password_salt_size.setter\n def password_salt_size(self, password_salt_size):\n \"\"\"Sets the password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties.\n\n\n :param password_salt_size: The password_salt_size of this OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties. # noqa: E501\n :type: ConfigNodePropertyInteger\n \"\"\"\n\n self._password_salt_size = password_salt_size\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n\n for attr, _ in six.iteritems(self.openapi_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, OrgApacheJackrabbitOakSecurityAuthenticationTokenTokenConfiguraProperties):\n return False\n\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n",
"step-ids": [
12,
18,
19,
22,
25
]
}
|
[
12,
18,
19,
22,
25
] |
from pytorch_lightning.callbacks import Callback
from evaluation.validator import Validator
class LSTMCallback(Callback):
def on_test_end(self, trainer, pl_module):
f = open('/evaluation.log', 'w')
for ev in pl_module.evaluation_data:
f.write(ev + '\n')
Validator(pl_module.evaluation_data, ['speed'])
|
normal
|
{
"blob_id": "42743ee2a812d8fe6fc036ba97daaff5be35564d",
"index": 4618,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass LSTMCallback(Callback):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass LSTMCallback(Callback):\n\n def on_test_end(self, trainer, pl_module):\n f = open('/evaluation.log', 'w')\n for ev in pl_module.evaluation_data:\n f.write(ev + '\\n')\n Validator(pl_module.evaluation_data, ['speed'])\n",
"step-4": "from pytorch_lightning.callbacks import Callback\nfrom evaluation.validator import Validator\n\n\nclass LSTMCallback(Callback):\n\n def on_test_end(self, trainer, pl_module):\n f = open('/evaluation.log', 'w')\n for ev in pl_module.evaluation_data:\n f.write(ev + '\\n')\n Validator(pl_module.evaluation_data, ['speed'])\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class StoryForm(ModelForm):
class Meta:
model = NewsStory
fields = ['title', 'pub_date', 'content']
widgets = {'pub_date': forms.DateInput(format='%m/%d/%Y', attrs={
'class': 'form-control', 'placeholder': 'select a date', 'type':
'date'})}
<|reserved_special_token_1|>
from django import forms
from django.forms import ModelForm, fields, widgets
from .models import NewsStory
class StoryForm(ModelForm):
class Meta:
model = NewsStory
fields = ['title', 'pub_date', 'content']
widgets = {'pub_date': forms.DateInput(format='%m/%d/%Y', attrs={
'class': 'form-control', 'placeholder': 'select a date', 'type':
'date'})}
<|reserved_special_token_1|>
from django import forms
from django.forms import ModelForm, fields, widgets
from .models import NewsStory
class StoryForm(ModelForm):
class Meta:
model = NewsStory
fields = ['title' , 'pub_date' , 'content']
widgets = {
'pub_date': forms.DateInput(format=('%m/%d/%Y'), attrs={'class':'form-control', 'placeholder':'select a date', 'type':'date'}),
}
|
flexible
|
{
"blob_id": "47a5ddcea2f6d8ce80793192d26c98ccc0e0340d",
"index": 1771,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass StoryForm(ModelForm):\n\n\n class Meta:\n model = NewsStory\n fields = ['title', 'pub_date', 'content']\n widgets = {'pub_date': forms.DateInput(format='%m/%d/%Y', attrs={\n 'class': 'form-control', 'placeholder': 'select a date', 'type':\n 'date'})}\n",
"step-3": "from django import forms\nfrom django.forms import ModelForm, fields, widgets\nfrom .models import NewsStory\n\n\nclass StoryForm(ModelForm):\n\n\n class Meta:\n model = NewsStory\n fields = ['title', 'pub_date', 'content']\n widgets = {'pub_date': forms.DateInput(format='%m/%d/%Y', attrs={\n 'class': 'form-control', 'placeholder': 'select a date', 'type':\n 'date'})}\n",
"step-4": "from django import forms\nfrom django.forms import ModelForm, fields, widgets\nfrom .models import NewsStory\n\nclass StoryForm(ModelForm):\n class Meta:\n model = NewsStory\n fields = ['title' , 'pub_date' , 'content']\n widgets = {\n 'pub_date': forms.DateInput(format=('%m/%d/%Y'), attrs={'class':'form-control', 'placeholder':'select a date', 'type':'date'}),\n\n }\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def measure_creation():
random.shuffle(TYPES)
for type_ in TYPES:
pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)
body = '{}(a, b, c)'.format(type_.__name__)
print('\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,
repeat=5))
def test_immut():
"""Verifies that the type called SimpleObjectImmutable
actually satisfies that definition.
"""
from types_ import read_only
q = SimpleObjectImmutable(a, b, c)
SimpleObjectImmutable.__setattr__ = read_only
try:
q.a = 1
assert False
except ValueError:
assert True
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def measure_creation():
random.shuffle(TYPES)
for type_ in TYPES:
pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)
body = '{}(a, b, c)'.format(type_.__name__)
print('\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,
repeat=5))
def test_immut():
"""Verifies that the type called SimpleObjectImmutable
actually satisfies that definition.
"""
from types_ import read_only
q = SimpleObjectImmutable(a, b, c)
SimpleObjectImmutable.__setattr__ = read_only
try:
q.a = 1
assert False
except ValueError:
assert True
if __name__ == '__main__':
measure_creation()
test_immut()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
TYPES = [SimpleObjectImmutable, SimpleObject, NamedTuple, SimpleTuple, c_struct
]
a = 1035
b = b'T - fo!'
c = [1, 5, 66]
def measure_creation():
random.shuffle(TYPES)
for type_ in TYPES:
pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)
body = '{}(a, b, c)'.format(type_.__name__)
print('\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,
repeat=5))
def test_immut():
"""Verifies that the type called SimpleObjectImmutable
actually satisfies that definition.
"""
from types_ import read_only
q = SimpleObjectImmutable(a, b, c)
SimpleObjectImmutable.__setattr__ = read_only
try:
q.a = 1
assert False
except ValueError:
assert True
if __name__ == '__main__':
measure_creation()
test_immut()
<|reserved_special_token_1|>
from __future__ import print_function
from types_ import SimpleObject, SimpleObjectImmutable, NamedTuple, SimpleTuple, c_struct
import timeit
import random
TYPES = [SimpleObjectImmutable, SimpleObject, NamedTuple, SimpleTuple, c_struct
]
a = 1035
b = b'T - fo!'
c = [1, 5, 66]
def measure_creation():
random.shuffle(TYPES)
for type_ in TYPES:
pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)
body = '{}(a, b, c)'.format(type_.__name__)
print('\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,
repeat=5))
def test_immut():
"""Verifies that the type called SimpleObjectImmutable
actually satisfies that definition.
"""
from types_ import read_only
q = SimpleObjectImmutable(a, b, c)
SimpleObjectImmutable.__setattr__ = read_only
try:
q.a = 1
assert False
except ValueError:
assert True
if __name__ == '__main__':
measure_creation()
test_immut()
<|reserved_special_token_1|>
#!/usr/bin/env python
from __future__ import print_function
from types_ import SimpleObject, SimpleObjectImmutable, NamedTuple, SimpleTuple, c_struct
import timeit
import random
TYPES = [
SimpleObjectImmutable,
SimpleObject,
NamedTuple,
SimpleTuple,
c_struct,
]
a = 1035
b = b'\x54 - fo!'
c = [1, 5, 66, ]
def measure_creation():
random.shuffle(TYPES)
for type_ in TYPES:
pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)
body = '{}(a, b, c)'.format(type_.__name__)
print('\t', type_.__name__, timeit.repeat(stmt=body, setup=pre, repeat=5))
def test_immut():
'''Verifies that the type called SimpleObjectImmutable
actually satisfies that definition.
'''
from types_ import read_only
q = SimpleObjectImmutable(a, b, c)
SimpleObjectImmutable.__setattr__ = read_only
try:
q.a = 1
assert(False)
except ValueError:
assert(True)
if __name__ == '__main__':
measure_creation()
test_immut()
|
flexible
|
{
"blob_id": "ba73562cd8ffa52a1fede35c3325e7e76a6dad54",
"index": 7966,
"step-1": "<mask token>\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\nif __name__ == '__main__':\n measure_creation()\n test_immut()\n",
"step-3": "<mask token>\nTYPES = [SimpleObjectImmutable, SimpleObject, NamedTuple, SimpleTuple, c_struct\n ]\na = 1035\nb = b'T - fo!'\nc = [1, 5, 66]\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\nif __name__ == '__main__':\n measure_creation()\n test_immut()\n",
"step-4": "from __future__ import print_function\nfrom types_ import SimpleObject, SimpleObjectImmutable, NamedTuple, SimpleTuple, c_struct\nimport timeit\nimport random\nTYPES = [SimpleObjectImmutable, SimpleObject, NamedTuple, SimpleTuple, c_struct\n ]\na = 1035\nb = b'T - fo!'\nc = [1, 5, 66]\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\nif __name__ == '__main__':\n measure_creation()\n test_immut()\n",
"step-5": "#!/usr/bin/env python\n\nfrom __future__ import print_function\n\nfrom types_ import SimpleObject, SimpleObjectImmutable, NamedTuple, SimpleTuple, c_struct\nimport timeit\nimport random\n\nTYPES = [\n SimpleObjectImmutable,\n SimpleObject,\n NamedTuple,\n SimpleTuple,\n c_struct,\n ]\n\na = 1035\nb = b'\\x54 - fo!'\nc = [1, 5, 66, ]\n\ndef measure_creation():\n random.shuffle(TYPES)\n\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre, repeat=5))\n\n\ndef test_immut():\n '''Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n '''\n from types_ import read_only\n\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert(False)\n except ValueError:\n assert(True)\n\nif __name__ == '__main__':\n measure_creation()\n\n test_immut()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PyrpgConfig(AppConfig):
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PyrpgConfig(AppConfig):
name = 'PyRPG'
<|reserved_special_token_1|>
from django.apps import AppConfig
class PyrpgConfig(AppConfig):
name = 'PyRPG'
|
flexible
|
{
"blob_id": "f8bf7e2d8f06bbd00f04047153833c07bf483fd3",
"index": 259,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass PyrpgConfig(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass PyrpgConfig(AppConfig):\n name = 'PyRPG'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass PyrpgConfig(AppConfig):\n name = 'PyRPG'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def main():
piso = largura * comprimento
volume_sala = largura * comprimento * altura
area = 2 * altura * largura + 2 * altura * comprimento
print(piso)
print(volume_sala)
print(area)
altura = float(input(""))
largura = float(input(""))
comprimento = float(input(""))
if __name__ == '__main__':
main()
|
normal
|
{
"blob_id": "d78fd8ebf9ef55700a25a9ce96d9094f1bfa564e",
"index": 6455,
"step-1": "<mask token>\n",
"step-2": "def main():\n piso = largura * comprimento\n volume_sala = largura * comprimento * altura\n area = 2 * altura * largura + 2 * altura * comprimento\n print(piso)\n print(volume_sala)\n print(area)\n\n\n<mask token>\n",
"step-3": "def main():\n piso = largura * comprimento\n volume_sala = largura * comprimento * altura\n area = 2 * altura * largura + 2 * altura * comprimento\n print(piso)\n print(volume_sala)\n print(area)\n\n\n<mask token>\nif __name__ == '__main__':\n main()\n",
"step-4": "def main():\n piso = largura * comprimento\n volume_sala = largura * comprimento * altura\n area = 2 * altura * largura + 2 * altura * comprimento\n print(piso)\n print(volume_sala)\n print(area)\n\n\naltura = float(input(''))\nlargura = float(input(''))\ncomprimento = float(input(''))\nif __name__ == '__main__':\n main()\n",
"step-5": "def main():\n piso = largura * comprimento\n volume_sala = largura * comprimento * altura\n area = 2 * altura * largura + 2 * altura * comprimento\n print(piso)\n print(volume_sala)\n print(area)\n\naltura = float(input(\"\"))\nlargura = float(input(\"\"))\ncomprimento = float(input(\"\"))\n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from django.db import models
from django.contrib.auth.models import User
from Event.models import Event
from University.models import University
from django.core.validators import validate_email
class Person(models.Model):
user = models.ForeignKey(User, related_name='person', on_delete=models.
CASCADE, blank=True, null=True)
event = models.ForeignKey(Event, on_delete=models.CASCADE)
name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
email = models.EmailField(unique=True, validators=[validate_email])
university = models.ForeignKey(University, on_delete=models.PROTECT)
rut = models.CharField(max_length=13, unique=True)
phone_number = models.CharField(max_length=20)
emergency_phone_number = models.CharField(max_length=20, null=True)
avatar = models.ImageField(upload_to='person_avatars/', blank=True)
pending_messages = models.IntegerField(default=0)
def __str__(self):
return '{} {}'.format(self.name, self.last_name)
class PersonTemporaryCode(models.Model):
person = models.ForeignKey(Person, on_delete=models.CASCADE)
code = models.IntegerField()
expiration_date = models.DateTimeField()
def __str__(self):
return f'{self.person} - {self.code} -- {self.expiration_date}'
|
normal
|
{
"blob_id": "28f4f14c3c29ee96c370ffe71c268549552b915e",
"index": 2419,
"step-1": "<mask token>\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-2": "<mask token>\n\n\nclass Person(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-3": "<mask token>\n\n\nclass Person(models.Model):\n user = models.ForeignKey(User, related_name='person', on_delete=models.\n CASCADE, blank=True, null=True)\n event = models.ForeignKey(Event, on_delete=models.CASCADE)\n name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n email = models.EmailField(unique=True, validators=[validate_email])\n university = models.ForeignKey(University, on_delete=models.PROTECT)\n rut = models.CharField(max_length=13, unique=True)\n phone_number = models.CharField(max_length=20)\n emergency_phone_number = models.CharField(max_length=20, null=True)\n avatar = models.ImageField(upload_to='person_avatars/', blank=True)\n pending_messages = models.IntegerField(default=0)\n\n def __str__(self):\n return '{} {}'.format(self.name, self.last_name)\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User\nfrom Event.models import Event\nfrom University.models import University\nfrom django.core.validators import validate_email\n\n\nclass Person(models.Model):\n user = models.ForeignKey(User, related_name='person', on_delete=models.\n CASCADE, blank=True, null=True)\n event = models.ForeignKey(Event, on_delete=models.CASCADE)\n name = models.CharField(max_length=100)\n last_name = models.CharField(max_length=100)\n email = models.EmailField(unique=True, validators=[validate_email])\n university = models.ForeignKey(University, on_delete=models.PROTECT)\n rut = models.CharField(max_length=13, unique=True)\n phone_number = models.CharField(max_length=20)\n emergency_phone_number = models.CharField(max_length=20, null=True)\n avatar = models.ImageField(upload_to='person_avatars/', blank=True)\n pending_messages = models.IntegerField(default=0)\n\n def __str__(self):\n return '{} {}'.format(self.name, self.last_name)\n\n\nclass PersonTemporaryCode(models.Model):\n person = models.ForeignKey(Person, on_delete=models.CASCADE)\n code = models.IntegerField()\n expiration_date = models.DateTimeField()\n\n def __str__(self):\n return f'{self.person} - {self.code} -- {self.expiration_date}'\n",
"step-5": null,
"step-ids": [
3,
4,
6,
7
]
}
|
[
3,
4,
6,
7
] |
import os
import re
import time
import numpy as np
import pandas as pd
from sklearn.cluster import AgglomerativeClustering
import math
import edlib
from progress.bar import IncrementalBar as Bar
from multiprocessing import Pool
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--pools",
default=4,
type=int,
help="Number of threads to use in aligning. Default 4. Optional."
)
parser.add_argument("--misses",
default=5,
type=float,
help="Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. "
"For longer sequences, this is scaled. "
)
parser.add_argument("--aligned",
default=None,
type=str,
help="Path to the output aligned directory. Required."
)
parser.add_argument("--overview",
default=None,
type=str,
help="Path to the output description csv. Required. Pairs with <--aligned> directory."
)
parser.add_argument("--k",
default=-1,
type=int,
help="Size of the k-mer created by BCALM. Required."
)
parser.add_argument("--input",
default=None,
type=str,
help="Path to the input file."
)
parser.set_defaults(all_sqs_result=False)
args = parser.parse_args([] if "__file__" not in globals() else None)
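# profile row indices: A=0, C=1, G=2, T=3 and gap '-'=4; rev_bases maps an index back to its character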
bases = dict(A=0, C=1, G=2, T=3)
bases['-'] = 4
rev_bases = {v: k for k, v in bases.items()}
global_alignment_ident_no = 0
operations = {
'.' : 0,
'-' : 1,
'|' : 0
}
class AlignmentProfile:
def __init__(self, width, df, identifier):
self.ident = identifier
self.profile = np.zeros((5, width))
self.repre_sq = ""
self.seq_alignments = None # this will be a pandas df
self.seq_align_counter = -1
self.calculate_profile(df)
def calculate_profile(self, df):
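        # records one alignment row per member sequence, accumulates the per-position base
        # counts into the 5 x width profile (rows A, C, G, T, gap), and derives the
        # representative sequence as the most frequent base of each column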
self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.shape[1], dtype=np.int8)) for index in df.index])
unwrapped_sq = df['sq'].str.split('', expand=True)
unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0], unwrapped_sq.columns[-1]])
counts = np.stack(df['count'].values)
for base in bases:
a = unwrapped_sq != base
newX = np.ma.array(counts, mask=a)
new_counts = newX.sum(axis=0)
self.profile[bases[base], :] += new_counts
# repre_sq
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = "".join([rev_bases[x] for x in maxs])
def add_sequence(self, new_sq, new_counts, nice, sq_index):
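        # merges a new sequence into the profile using an edlib "nice" alignment against repre_sq:
        # inserts profile columns where the target has gaps, expands the new counts where the
        # query has gaps, adds the counts at the aligned offset, records the per-sequence
        # alignment row and recomputes the representative sequence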
offset = re.search(nice['target_aligned'].replace('-', ''), self.repre_sq).start(0)
x = self.profile
# padding with the following number of observed positions (sum of all bases)
# pad profile with insertions
insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]
for i, index in enumerate(insertions):
if x.shape[1] >= index:
value = 0
else:
value = x[:, index].sum()
x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)
self.seq_alignments.insert(loc=int(index+offset), column=self.seq_align_counter, value=1)
self.seq_align_counter -= 1
# pad new counts with deletions
aligned_query = np.array(list(nice['query_aligned']))
deletions = np.where(aligned_query == '-')[0]
for i, index in enumerate(deletions):
value = new_counts[index]
new_counts = np.insert(new_counts, index, value, axis=0)
i = offset
for base, count in zip(aligned_query, new_counts):
x[bases[base], i] += count
i += 1
self.profile = x
# store new sequence alignment
added_alignment = -np.ones(self.profile.shape[1])
for i, char in enumerate(nice['target_aligned']):
if char == '-':
added_alignment[offset + i] = 1
else:
added_alignment[offset + i] = 0
self.seq_alignments.loc[-1] = [sq_index, *added_alignment] # adding a row
self.seq_alignments.index = self.seq_alignments.index + 1 # shifting index
# recalculate repre_sq -- the most probable one
maxs = np.argmax(self.profile, axis=0)
self.repre_sq = "".join([rev_bases[x] for x in maxs if rev_bases[x] != '-']) # '-' is removed from the sq
def dst_func(x, y):
return (np.array(x) != np.array(y)).sum()
def read_alignment(filename):
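    # each input line has the form "<sequence>;<comma-separated per-position counts>",
    # e.g. "ACGT;3,1,2,4"; yields (sequence, counts as an int array, raw count string)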
for line in open(filename):
sq, count = line.strip('\n').split(';')
yield sq, np.array([int(x) for x in count.split(',')]), count
def cluster_group(df_group, l, dst=dst_func):
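    # complete-linkage agglomerative clustering of same-length sequences over a pairwise
    # mismatch (Hamming) distance matrix, cut at threshold * l mismatches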
sqs = df_group.reset_index()['sq']
n = len(sqs)
if n <= 1:
return np.zeros(n)
dst_matrix = np.zeros((n, n))
for i in range(n):
for j in range(i):
d = dst(sqs[i], sqs[j])
dst_matrix[i, j] = d
dst_matrix[j, i] = d
model = AgglomerativeClustering(distance_threshold=threshold * l,
n_clusters=None,
linkage='complete',
affinity='precomputed')
clusters = model.fit_predict(dst_matrix)
return clusters
aligned_sqs_file = args.input
k = args.k
misses = args.misses
pools = args.pools
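# per-base error budget: a sequence of length l may differ from a profile by at most
# about threshold * l substitutions/insertions/deletions when aligning or clustering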
threshold = misses / k
if args.aligned is None:
output_profile_dir = aligned_sqs_file + "_profiles"
else:
output_profile_dir = args.aligned
if args.overview is None:
output_csv_file = aligned_sqs_file + "_overview.csv"
else:
output_csv_file = args.overview
# read
df = pd.DataFrame(read_alignment(aligned_sqs_file))
df.columns = ['sq', 'count', 'str_count']
df['length'] = df['sq'].str.len()
# df['alignment'] = -1 # every aligned sq has an alignment identification
groups = df.groupby(by='length')
unique_lengths = df['length'].sort_values(ascending=False).unique()
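# process length groups from longest to shortest: the longest group seeds the initial
# profiles, and shorter sequences are first aligned against existing profiles before
# any leftovers are clustered into new profiles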
against = []
longest = unique_lengths[0]
df_group = groups.get_group(longest).copy()
clusters = cluster_group(df_group, longest)
df_group['cluster'] = clusters
alignments = {
}
for cluster, cluster_df in df_group.groupby(by='cluster'):
alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
# df.loc[df['sq'].isin(cluster_df['sq']), 'alignment'] = alignment.ident
# to each sequence
start = time.time()
# print(df.groupby(by='length').get_group(longest))
# print("running on shorter")
with Bar("Processing length groups...", max=len(unique_lengths) - 1) as bar:
for length in unique_lengths[1:]:
bar.next()
df_group = groups.get_group(length).copy()
        def getDistanceAndAlignment(sq):
            maxval = np.floor(threshold * len(sq))
            best_dist = np.inf
            best_target = None
            # fallback; should not happen, the caller only aligns when length * threshold >= 1
            if maxval < 1:
                return best_dist, best_target
            for target in against:
                align_res = edlib.align(sq, target.repre_sq, mode='HW', task='distance', k=maxval)
                if align_res['editDistance'] != -1:
                    if best_dist > align_res['editDistance']:
                        if align_res['editDistance'] == 0:
                            return align_res['editDistance'], target.ident
                        best_dist = align_res['editDistance']
                        best_target = target
            if best_target is not None:
                best_target = best_target.ident
            return best_dist, best_target
        if length * threshold >= 1:
# try align
with Pool(pools) as pool:
result = pool.map(getDistanceAndAlignment, df_group['sq'])
df_group['aligned'] = result
# add aligned to profiles
aligned = df_group[df_group['aligned'] != (np.inf, None)]
for index, row in aligned.iterrows():
to = alignments[row['aligned'][1]]
align_res = edlib.align(row.sq, to.repre_sq, mode='HW', task='path')
nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)
to.add_sequence(row.sq, row['count'], nice, index)
# df.loc[df['sq'] == row.sq, 'alignment'] = to.ident
# cluster unaligned, add to against
unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()
clusters = cluster_group(unaligned, length)
unaligned['cluster'] = clusters
for cluster, cluster_df in unaligned.groupby(by='cluster'):
alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
else:
# threshold is less than one, no clustering nor alignment takes place
df_group["aligned"] = [(np.inf, None) for _ in range(len(df_group))]
unaligned = df_group.copy()
unaligned["cluster"] = list(range(len(unaligned)))
# print(f"pseudoclustering elapsed: {time.time() - s}")
s = time.time()
for i, row in unaligned.iterrows():
cluster_df = pd.DataFrame(row).T
alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)
alignments[global_alignment_ident_no] = alignment
global_alignment_ident_no += 1
against.append(alignment)
# print(f"alignment elapsed: {time.time() - s}")
print(f"{aligned_sqs_file} elapsed: {time.time() - start}")
print(f"{aligned_sqs_file} writing...")
os.makedirs(output_profile_dir, exist_ok=True)
for alignment in against:
filename = f"{output_profile_dir}/{alignment.ident}.prf"
np.save(filename, alignment.profile)
# get actual alignment for each sq
all_alignments = []
for alignment in against:
itemized = alignment.seq_alignments
num_cols = itemized.columns[1:]
# index_col = itemized.columns[0]
# translate to sth readable
for col in num_cols:
itemized[col] = itemized[col].astype(int).apply(str)
itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1) # todo maybe cigar?
itemized = itemized.drop(columns=num_cols)
itemized.columns = ['index_df', 'alignment_actual']
itemized['alignment'] = alignment.ident
all_alignments.append(itemized)
all_alignments = pd.concat(all_alignments)
merged = pd.merge(all_alignments, df, left_on='index_df', right_index=True)
# write sequences in df
merged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)
print(f"{aligned_sqs_file} done")
|
normal
|
{
"blob_id": "7ae328bcfdec2d17fceb5d707f13cf495fde4469",
"index": 7490,
"step-1": "<mask token>\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\n<mask token>\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\n<mask token>\n",
"step-3": "<mask token>\nparser.add_argument('--pools', default=4, type=int, help=\n 'Number of threads to use in aligning. Default 4. Optional.')\nparser.add_argument('--misses', default=5, type=float, help=\n 'Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. For longer sequences, this is scaled. '\n )\nparser.add_argument('--aligned', default=None, type=str, help=\n 'Path to the output aligned directory. Required.')\nparser.add_argument('--overview', default=None, type=str, help=\n 'Path to the output description csv. Required. Pairs with <--aligned> directory.'\n )\nparser.add_argument('--k', default=-1, type=int, help=\n 'Size of the k-mer created by BCALM. Required.')\nparser.add_argument('--input', default=None, type=str, help=\n 'Path to the input file.')\nparser.set_defaults(all_sqs_result=False)\n<mask token>\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n 
dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\n<mask token>\nif args.aligned is None:\n output_profile_dir = aligned_sqs_file + '_profiles'\nelse:\n output_profile_dir = args.aligned\nif args.overview is None:\n output_csv_file = aligned_sqs_file + '_overview.csv'\nelse:\n output_csv_file = args.overview\n<mask token>\nfor cluster, cluster_df in df_group.groupby(by='cluster'):\n alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no\n )\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n<mask token>\nwith Bar('Processing length groups...', max=len(unique_lengths) - 1) as bar:\n for length in unique_lengths[1:]:\n bar.next()\n df_group = groups.get_group(length).copy()\n\n def getDistanceAndAlignment(sq):\n maxval = np.floor(threshold * len(sq))\n min = np.inf\n min_target = None\n if maxval < 1:\n return min, min_target\n for target in against:\n align_res = edlib.align(sq, target.repre_sq, mode='HW',\n task='distance', k=maxval)\n if align_res['editDistance'] != -1:\n if min > align_res['editDistance']:\n if align_res['editDistance'] == 0:\n return align_res['editDistance'], target.ident\n min = align_res['editDistance']\n min_target = target\n if min_target is not None:\n min_target = min_target.ident\n return min, min_target\n x = length * threshold\n if length * threshold >= 1:\n with Pool(pools) as pool:\n result = pool.map(getDistanceAndAlignment, df_group['sq'])\n df_group['aligned'] = result\n aligned = df_group[df_group['aligned'] != (np.inf, None)]\n for index, row in aligned.iterrows():\n to = alignments[row['aligned'][1]]\n align_res = edlib.align(row.sq, to.repre_sq, mode='HW',\n task='path')\n nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)\n to.add_sequence(row.sq, row['count'], nice, index)\n unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()\n clusters = cluster_group(unaligned, length)\n unaligned['cluster'] = clusters\n for cluster, cluster_df in unaligned.groupby(by='cluster'):\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n else:\n df_group['aligned'] = [(np.inf, None) for _ in range(len(df_group))\n ]\n unaligned = df_group.copy()\n unaligned['cluster'] = list(range(len(unaligned)))\n s = time.time()\n for i, row in unaligned.iterrows():\n cluster_df = pd.DataFrame(row).T\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\nprint(f'{aligned_sqs_file} elapsed: {time.time() - start}')\nprint(f'{aligned_sqs_file} writing...')\nos.makedirs(output_profile_dir, exist_ok=True)\nfor alignment in against:\n filename = f'{output_profile_dir}/{alignment.ident}.prf'\n np.save(filename, alignment.profile)\n<mask token>\nfor alignment in against:\n itemized = alignment.seq_alignments\n num_cols = itemized.columns[1:]\n for col in num_cols:\n itemized[col] = itemized[col].astype(int).apply(str)\n itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1)\n itemized = itemized.drop(columns=num_cols)\n itemized.columns = ['index_df', 'alignment_actual']\n itemized['alignment'] = 
alignment.ident\n all_alignments.append(itemized)\n<mask token>\nmerged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)\nprint(f'{aligned_sqs_file} done')\n",
"step-4": "<mask token>\nparser = argparse.ArgumentParser()\nparser.add_argument('--pools', default=4, type=int, help=\n 'Number of threads to use in aligning. Default 4. Optional.')\nparser.add_argument('--misses', default=5, type=float, help=\n 'Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. For longer sequences, this is scaled. '\n )\nparser.add_argument('--aligned', default=None, type=str, help=\n 'Path to the output aligned directory. Required.')\nparser.add_argument('--overview', default=None, type=str, help=\n 'Path to the output description csv. Required. Pairs with <--aligned> directory.'\n )\nparser.add_argument('--k', default=-1, type=int, help=\n 'Size of the k-mer created by BCALM. Required.')\nparser.add_argument('--input', default=None, type=str, help=\n 'Path to the input file.')\nparser.set_defaults(all_sqs_result=False)\nargs = parser.parse_args([] if '__file__' not in globals() else None)\nbases = dict(A=0, C=1, G=2, T=3)\nbases['-'] = 4\nrev_bases = {v: k for k, v in bases.items()}\nglobal_alignment_ident_no = 0\noperations = {'.': 0, '-': 1, '|': 0}\n\n\nclass AlignmentProfile:\n\n def __init__(self, width, df, identifier):\n self.ident = identifier\n self.profile = np.zeros((5, width))\n self.repre_sq = ''\n self.seq_alignments = None\n self.seq_align_counter = -1\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.\n shape[1], dtype=np.int8)) for index in df.index])\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0],\n unwrapped_sq.columns[-1]])\n counts = np.stack(df['count'].values)\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.\n repre_sq).start(0)\n x = self.profile\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index + offset), column=self\n .seq_align_counter, value=1)\n self.seq_align_counter -= 1\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n self.profile = x\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment]\n self.seq_alignments.index = self.seq_alignments.index + 1\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = ''.join([rev_bases[x] for x in maxs if rev_bases[x] !=\n '-'])\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in 
count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n if n <= 1:\n return np.zeros(n)\n dst_matrix = np.zeros((n, n))\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None, linkage='complete', affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\naligned_sqs_file = args.input\nk = args.k\nmisses = args.misses\npools = args.pools\nthreshold = misses / k\nif args.aligned is None:\n output_profile_dir = aligned_sqs_file + '_profiles'\nelse:\n output_profile_dir = args.aligned\nif args.overview is None:\n output_csv_file = aligned_sqs_file + '_overview.csv'\nelse:\n output_csv_file = args.overview\ndf = pd.DataFrame(read_alignment(aligned_sqs_file))\ndf.columns = ['sq', 'count', 'str_count']\ndf['length'] = df['sq'].str.len()\ngroups = df.groupby(by='length')\nunique_lengths = df['length'].sort_values(ascending=False).unique()\nagainst = []\nlongest = unique_lengths[0]\ndf_group = groups.get_group(longest).copy()\nclusters = cluster_group(df_group, longest)\ndf_group['cluster'] = clusters\nalignments = {}\nfor cluster, cluster_df in df_group.groupby(by='cluster'):\n alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no\n )\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\nstart = time.time()\nwith Bar('Processing length groups...', max=len(unique_lengths) - 1) as bar:\n for length in unique_lengths[1:]:\n bar.next()\n df_group = groups.get_group(length).copy()\n\n def getDistanceAndAlignment(sq):\n maxval = np.floor(threshold * len(sq))\n min = np.inf\n min_target = None\n if maxval < 1:\n return min, min_target\n for target in against:\n align_res = edlib.align(sq, target.repre_sq, mode='HW',\n task='distance', k=maxval)\n if align_res['editDistance'] != -1:\n if min > align_res['editDistance']:\n if align_res['editDistance'] == 0:\n return align_res['editDistance'], target.ident\n min = align_res['editDistance']\n min_target = target\n if min_target is not None:\n min_target = min_target.ident\n return min, min_target\n x = length * threshold\n if length * threshold >= 1:\n with Pool(pools) as pool:\n result = pool.map(getDistanceAndAlignment, df_group['sq'])\n df_group['aligned'] = result\n aligned = df_group[df_group['aligned'] != (np.inf, None)]\n for index, row in aligned.iterrows():\n to = alignments[row['aligned'][1]]\n align_res = edlib.align(row.sq, to.repre_sq, mode='HW',\n task='path')\n nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)\n to.add_sequence(row.sq, row['count'], nice, index)\n unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()\n clusters = cluster_group(unaligned, length)\n unaligned['cluster'] = clusters\n for cluster, cluster_df in unaligned.groupby(by='cluster'):\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n else:\n df_group['aligned'] = [(np.inf, None) for _ in range(len(df_group))\n ]\n unaligned = df_group.copy()\n unaligned['cluster'] = list(range(len(unaligned)))\n s = time.time()\n for i, row in unaligned.iterrows():\n cluster_df = pd.DataFrame(row).T\n alignment = AlignmentProfile(length, cluster_df,\n global_alignment_ident_no)\n 
alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\nprint(f'{aligned_sqs_file} elapsed: {time.time() - start}')\nprint(f'{aligned_sqs_file} writing...')\nos.makedirs(output_profile_dir, exist_ok=True)\nfor alignment in against:\n filename = f'{output_profile_dir}/{alignment.ident}.prf'\n np.save(filename, alignment.profile)\nall_alignments = []\nfor alignment in against:\n itemized = alignment.seq_alignments\n num_cols = itemized.columns[1:]\n for col in num_cols:\n itemized[col] = itemized[col].astype(int).apply(str)\n itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1)\n itemized = itemized.drop(columns=num_cols)\n itemized.columns = ['index_df', 'alignment_actual']\n itemized['alignment'] = alignment.ident\n all_alignments.append(itemized)\nall_alignments = pd.concat(all_alignments)\nmerged = pd.merge(all_alignments, df, left_on='index_df', right_index=True)\nmerged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)\nprint(f'{aligned_sqs_file} done')\n",
"step-5": "import os\nimport re\nimport time\nimport numpy as np\nimport pandas as pd\nfrom sklearn.cluster import AgglomerativeClustering\nimport math\nimport edlib\nfrom progress.bar import IncrementalBar as Bar\nfrom multiprocessing import Pool\nimport argparse\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"--pools\",\n default=4,\n type=int,\n help=\"Number of threads to use in aligning. Default 4. Optional.\"\n )\nparser.add_argument(\"--misses\",\n default=5,\n type=float,\n help=\"Number of allowed substitutions/insertions/deletions in aligning a sequence of length k. \"\n \"For longer sequences, this is scaled. \"\n )\nparser.add_argument(\"--aligned\",\n default=None,\n type=str,\n help=\"Path to the output aligned directory. Required.\"\n )\nparser.add_argument(\"--overview\",\n default=None,\n type=str,\n help=\"Path to the output description csv. Required. Pairs with <--aligned> directory.\"\n )\nparser.add_argument(\"--k\",\n default=-1,\n type=int,\n help=\"Size of the k-mer created by BCALM. Required.\"\n )\nparser.add_argument(\"--input\",\n default=None,\n type=str,\n help=\"Path to the input file.\"\n )\nparser.set_defaults(all_sqs_result=False)\n\nargs = parser.parse_args([] if \"__file__\" not in globals() else None)\n\nbases = dict(A=0, C=1, G=2, T=3)\nbases['-'] = 4\nrev_bases = {v: k for k, v in bases.items()}\nglobal_alignment_ident_no = 0\n\n\noperations = {\n '.' : 0,\n '-' : 1,\n '|' : 0\n}\n\n\nclass AlignmentProfile:\n def __init__(self, width, df, identifier):\n self.ident = identifier\n\n self.profile = np.zeros((5, width))\n self.repre_sq = \"\"\n self.seq_alignments = None # this will be a pandas df\n self.seq_align_counter = -1\n\n self.calculate_profile(df)\n\n def calculate_profile(self, df):\n self.seq_alignments = pd.DataFrame([(index, *np.zeros(self.profile.shape[1], dtype=np.int8)) for index in df.index])\n\n unwrapped_sq = df['sq'].str.split('', expand=True)\n unwrapped_sq = unwrapped_sq.drop(columns=[unwrapped_sq.columns[0], unwrapped_sq.columns[-1]])\n\n counts = np.stack(df['count'].values)\n\n for base in bases:\n a = unwrapped_sq != base\n newX = np.ma.array(counts, mask=a)\n new_counts = newX.sum(axis=0)\n self.profile[bases[base], :] += new_counts\n\n # repre_sq\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = \"\".join([rev_bases[x] for x in maxs])\n\n def add_sequence(self, new_sq, new_counts, nice, sq_index):\n offset = re.search(nice['target_aligned'].replace('-', ''), self.repre_sq).start(0)\n x = self.profile\n # padding with the following number of observed positions (sum of all bases)\n\n # pad profile with insertions\n insertions = np.where(np.array(list(nice['target_aligned'])) == '-')[0]\n for i, index in enumerate(insertions):\n if x.shape[1] >= index:\n value = 0\n else:\n value = x[:, index].sum()\n x = np.insert(x, index + offset, [0, 0, 0, 0, value], axis=1)\n self.seq_alignments.insert(loc=int(index+offset), column=self.seq_align_counter, value=1)\n self.seq_align_counter -= 1\n\n # pad new counts with deletions\n aligned_query = np.array(list(nice['query_aligned']))\n deletions = np.where(aligned_query == '-')[0]\n for i, index in enumerate(deletions):\n value = new_counts[index]\n new_counts = np.insert(new_counts, index, value, axis=0)\n\n i = offset\n for base, count in zip(aligned_query, new_counts):\n x[bases[base], i] += count\n i += 1\n\n self.profile = x\n\n # store new sequence alignment\n added_alignment = -np.ones(self.profile.shape[1])\n for i, char in enumerate(nice['target_aligned']):\n 
if char == '-':\n added_alignment[offset + i] = 1\n else:\n added_alignment[offset + i] = 0\n self.seq_alignments.loc[-1] = [sq_index, *added_alignment] # adding a row\n self.seq_alignments.index = self.seq_alignments.index + 1 # shifting index\n\n # recalculate repre_sq -- the most probable one\n maxs = np.argmax(self.profile, axis=0)\n self.repre_sq = \"\".join([rev_bases[x] for x in maxs if rev_bases[x] != '-']) # '-' is removed from the sq\n\n\ndef dst_func(x, y):\n return (np.array(x) != np.array(y)).sum()\n\n\ndef read_alignment(filename):\n for line in open(filename):\n sq, count = line.strip('\\n').split(';')\n yield sq, np.array([int(x) for x in count.split(',')]), count\n\n\ndef cluster_group(df_group, l, dst=dst_func):\n sqs = df_group.reset_index()['sq']\n n = len(sqs)\n\n if n <= 1:\n return np.zeros(n)\n\n dst_matrix = np.zeros((n, n))\n\n for i in range(n):\n for j in range(i):\n d = dst(sqs[i], sqs[j])\n dst_matrix[i, j] = d\n dst_matrix[j, i] = d\n\n model = AgglomerativeClustering(distance_threshold=threshold * l,\n n_clusters=None,\n linkage='complete',\n affinity='precomputed')\n clusters = model.fit_predict(dst_matrix)\n return clusters\n\n\naligned_sqs_file = args.input\nk = args.k\nmisses = args.misses\npools = args.pools\n\nthreshold = misses / k\nif args.aligned is None:\n output_profile_dir = aligned_sqs_file + \"_profiles\"\nelse:\n output_profile_dir = args.aligned\n\nif args.overview is None:\n output_csv_file = aligned_sqs_file + \"_overview.csv\"\nelse:\n output_csv_file = args.overview\n\n# read\ndf = pd.DataFrame(read_alignment(aligned_sqs_file))\ndf.columns = ['sq', 'count', 'str_count']\ndf['length'] = df['sq'].str.len()\n# df['alignment'] = -1 # every aligned sq has an alignment identification\ngroups = df.groupby(by='length')\n\nunique_lengths = df['length'].sort_values(ascending=False).unique()\n\nagainst = []\n\nlongest = unique_lengths[0]\ndf_group = groups.get_group(longest).copy()\n\nclusters = cluster_group(df_group, longest)\ndf_group['cluster'] = clusters\n\nalignments = {\n}\n\nfor cluster, cluster_df in df_group.groupby(by='cluster'):\n alignment = AlignmentProfile(longest, cluster_df, global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n\n global_alignment_ident_no += 1\n against.append(alignment)\n\n # df.loc[df['sq'].isin(cluster_df['sq']), 'alignment'] = alignment.ident\n\n # to each sequence\n\n\nstart = time.time()\n\n# print(df.groupby(by='length').get_group(longest))\n# print(\"running on shorter\")\n\nwith Bar(\"Processing length groups...\", max=len(unique_lengths) - 1) as bar:\n for length in unique_lengths[1:]:\n bar.next()\n df_group = groups.get_group(length).copy()\n\n def getDistanceAndAlignment(sq):\n # this is a fallback, it should not happen\n maxval = np.floor(threshold * len(sq))\n\n min = np.inf\n min_target = None\n\n if maxval < 1:\n return min,min_target\n\n for target in against:\n align_res = edlib.align(sq, target.repre_sq, mode='HW', task='distance', k=maxval)\n if align_res['editDistance'] != -1:\n if min > align_res['editDistance']:\n if align_res['editDistance'] == 0:\n return align_res['editDistance'], target.ident\n\n min = align_res['editDistance']\n min_target = target\n\n if min_target is not None:\n min_target = min_target.ident\n\n return min, min_target\n\n x = length * threshold\n if length * threshold >= 1:\n # try align\n with Pool(pools) as pool:\n result = pool.map(getDistanceAndAlignment, df_group['sq'])\n df_group['aligned'] = result\n\n # add aligned to profiles\n 
aligned = df_group[df_group['aligned'] != (np.inf, None)]\n for index, row in aligned.iterrows():\n to = alignments[row['aligned'][1]]\n align_res = edlib.align(row.sq, to.repre_sq, mode='HW', task='path')\n nice = edlib.getNiceAlignment(align_res, row.sq, to.repre_sq)\n to.add_sequence(row.sq, row['count'], nice, index)\n # df.loc[df['sq'] == row.sq, 'alignment'] = to.ident\n\n # cluster unaligned, add to against\n unaligned = df_group[df_group['aligned'] == (np.inf, None)].copy()\n clusters = cluster_group(unaligned, length)\n unaligned['cluster'] = clusters\n\n for cluster, cluster_df in unaligned.groupby(by='cluster'):\n alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n else:\n # threshold is less than one, no clustering nor alignment takes place\n df_group[\"aligned\"] = [(np.inf, None) for _ in range(len(df_group))]\n unaligned = df_group.copy()\n unaligned[\"cluster\"] = list(range(len(unaligned)))\n # print(f\"pseudoclustering elapsed: {time.time() - s}\")\n\n s = time.time()\n for i, row in unaligned.iterrows():\n cluster_df = pd.DataFrame(row).T\n alignment = AlignmentProfile(length, cluster_df, global_alignment_ident_no)\n alignments[global_alignment_ident_no] = alignment\n global_alignment_ident_no += 1\n against.append(alignment)\n # print(f\"alignment elapsed: {time.time() - s}\")\n\n\nprint(f\"{aligned_sqs_file} elapsed: {time.time() - start}\")\nprint(f\"{aligned_sqs_file} writing...\")\n\n\nos.makedirs(output_profile_dir, exist_ok=True)\nfor alignment in against:\n filename = f\"{output_profile_dir}/{alignment.ident}.prf\"\n np.save(filename, alignment.profile)\n\n# get actual alignment for each sq\nall_alignments = []\nfor alignment in against:\n itemized = alignment.seq_alignments\n num_cols = itemized.columns[1:]\n # index_col = itemized.columns[0]\n # translate to sth readable\n for col in num_cols:\n itemized[col] = itemized[col].astype(int).apply(str)\n\n itemized['alignment_actual'] = itemized[num_cols].agg(','.join, axis=1) # todo maybe cigar?\n itemized = itemized.drop(columns=num_cols)\n itemized.columns = ['index_df', 'alignment_actual']\n itemized['alignment'] = alignment.ident\n all_alignments.append(itemized)\n\nall_alignments = pd.concat(all_alignments)\nmerged = pd.merge(all_alignments, df, left_on='index_df', right_index=True)\n\n\n# write sequences in df\nmerged.drop(columns=['count', 'index_df']).to_csv(output_csv_file, index=False)\nprint(f\"{aligned_sqs_file} done\")\n",
"step-ids": [
5,
7,
8,
9,
11
]
}
|
[
5,
7,
8,
9,
11
] |
<|reserved_special_token_0|>
class Window:
def __init__(self, world, xyw_min=None, xyw_max=None):
self.world = world
if xyw_min is None or xyw_max is None:
self.xyw_min = -100, -100
self.xyw_max = 100, 100
else:
if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:
raise Exception(
'O param xyw_min deve ser uma tupla de 2 valores.')
try:
self.xyw_min = float(xyw_min[0]), float(xyw_min[1])
except Exception:
raise Exception(
'As coordenadas xyw_min devem ser pares de números.')
if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:
raise Exception(
'O param xyw_max deve ser uma tupla de 2 valores.')
try:
self.xyw_max = float(xyw_max[0]), float(xyw_max[1])
except Exception:
raise Exception(
'As coordenadas xyw_max devem ser pares de números.')
self.xyw_1 = self.xyw_min
self.xyw_2 = self.xyw_max[0], self.xyw_min[1]
self.xyw_3 = self.xyw_min[0], self.xyw_max[1]
self.xyw_4 = self.xyw_max
self.center = self.calcCenter()
self.newCenter = self.center
self.fatorMovimento = 10
self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
self.degrees = 0
self.scn()
def set_xyw_min(self, xmin, ymin):
self.xyw_min = xmin, ymin
def set_xyw_max(self, xmax, ymax):
self.xyw_max = xmax, ymax
<|reserved_special_token_0|>
def getCoords(self) ->(float, float, float, float):
return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[
1]
<|reserved_special_token_0|>
def moveUp(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * sin, dy=self.
fatorMovimento * cos)
def moveDown(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.
fatorMovimento * cos)
def moveRight(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.
fatorMovimento * sin)
def moveLeft(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * cos, dy=self.
fatorMovimento * sin)
<|reserved_special_token_0|>
def zoomIn(self):
self._scale(scale=0.9)
self.fatorMovimento = self.fatorMovimento * 0.9
def zoomOut(self):
self._scale(scale=1.1)
self.fatorMovimento = self.fatorMovimento * 1.1
def _scale(self, scale=1):
cx, cy = self.newCenter
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -
1 * cy, 1]])
scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])
translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
transformations = numpy.matmul(translate_matrix_1, scale_matrix)
transformations = numpy.matmul(transformations, translate_matrix_2)
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
transformations)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def rotateRight(self, angle):
self._rotate(360 - angle)
def rotateLeft(self, angle):
self._rotate(angle)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def applySCN(self, x, y):
point_coords = numpy.array([x, y, 1])
final_coords = numpy.matmul(point_coords, self.window_scn)
return final_coords[0], final_coords[1]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Window:
def __init__(self, world, xyw_min=None, xyw_max=None):
self.world = world
if xyw_min is None or xyw_max is None:
self.xyw_min = -100, -100
self.xyw_max = 100, 100
else:
if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:
raise Exception(
'O param xyw_min deve ser uma tupla de 2 valores.')
try:
self.xyw_min = float(xyw_min[0]), float(xyw_min[1])
except Exception:
raise Exception(
'As coordenadas xyw_min devem ser pares de números.')
if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:
raise Exception(
'O param xyw_max deve ser uma tupla de 2 valores.')
try:
self.xyw_max = float(xyw_max[0]), float(xyw_max[1])
except Exception:
raise Exception(
'As coordenadas xyw_max devem ser pares de números.')
self.xyw_1 = self.xyw_min
self.xyw_2 = self.xyw_max[0], self.xyw_min[1]
self.xyw_3 = self.xyw_min[0], self.xyw_max[1]
self.xyw_4 = self.xyw_max
self.center = self.calcCenter()
self.newCenter = self.center
self.fatorMovimento = 10
self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
self.degrees = 0
self.scn()
def set_xyw_min(self, xmin, ymin):
self.xyw_min = xmin, ymin
def set_xyw_max(self, xmax, ymax):
self.xyw_max = xmax, ymax
<|reserved_special_token_0|>
def getCoords(self) ->(float, float, float, float):
return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[
1]
<|reserved_special_token_0|>
def moveUp(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * sin, dy=self.
fatorMovimento * cos)
def moveDown(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.
fatorMovimento * cos)
def moveRight(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.
fatorMovimento * sin)
def moveLeft(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * cos, dy=self.
fatorMovimento * sin)
def _translate(self, dx=0, dy=0):
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
translate_matrix)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def zoomIn(self):
self._scale(scale=0.9)
self.fatorMovimento = self.fatorMovimento * 0.9
def zoomOut(self):
self._scale(scale=1.1)
self.fatorMovimento = self.fatorMovimento * 1.1
def _scale(self, scale=1):
cx, cy = self.newCenter
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -
1 * cy, 1]])
scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])
translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
transformations = numpy.matmul(translate_matrix_1, scale_matrix)
transformations = numpy.matmul(transformations, translate_matrix_2)
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
transformations)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def rotateRight(self, angle):
self._rotate(360 - angle)
def rotateLeft(self, angle):
self._rotate(angle)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def applySCN(self, x, y):
point_coords = numpy.array([x, y, 1])
final_coords = numpy.matmul(point_coords, self.window_scn)
return final_coords[0], final_coords[1]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Window:
def __init__(self, world, xyw_min=None, xyw_max=None):
self.world = world
if xyw_min is None or xyw_max is None:
self.xyw_min = -100, -100
self.xyw_max = 100, 100
else:
if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:
raise Exception(
'O param xyw_min deve ser uma tupla de 2 valores.')
try:
self.xyw_min = float(xyw_min[0]), float(xyw_min[1])
except Exception:
raise Exception(
'As coordenadas xyw_min devem ser pares de números.')
if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:
raise Exception(
'O param xyw_max deve ser uma tupla de 2 valores.')
try:
self.xyw_max = float(xyw_max[0]), float(xyw_max[1])
except Exception:
raise Exception(
'As coordenadas xyw_max devem ser pares de números.')
self.xyw_1 = self.xyw_min
self.xyw_2 = self.xyw_max[0], self.xyw_min[1]
self.xyw_3 = self.xyw_min[0], self.xyw_max[1]
self.xyw_4 = self.xyw_max
self.center = self.calcCenter()
self.newCenter = self.center
self.fatorMovimento = 10
self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
self.degrees = 0
self.scn()
def set_xyw_min(self, xmin, ymin):
self.xyw_min = xmin, ymin
def set_xyw_max(self, xmax, ymax):
self.xyw_max = xmax, ymax
def calcCenter(self) ->(float, float):
return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] +
self.xyw_max[1]) / 2
def getCoords(self) ->(float, float, float, float):
return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[
1]
def getWindowDimensions(self) ->(float, float):
xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])
xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])
xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])
return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)
def moveUp(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * sin, dy=self.
fatorMovimento * cos)
def moveDown(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.
fatorMovimento * cos)
def moveRight(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.
fatorMovimento * sin)
def moveLeft(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * cos, dy=self.
fatorMovimento * sin)
def _translate(self, dx=0, dy=0):
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
translate_matrix)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def zoomIn(self):
self._scale(scale=0.9)
self.fatorMovimento = self.fatorMovimento * 0.9
def zoomOut(self):
self._scale(scale=1.1)
self.fatorMovimento = self.fatorMovimento * 1.1
def _scale(self, scale=1):
cx, cy = self.newCenter
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -
1 * cy, 1]])
scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])
translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
transformations = numpy.matmul(translate_matrix_1, scale_matrix)
transformations = numpy.matmul(transformations, translate_matrix_2)
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
transformations)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def rotateRight(self, angle):
self._rotate(360 - angle)
def rotateLeft(self, angle):
self._rotate(angle)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def applySCN(self, x, y):
point_coords = numpy.array([x, y, 1])
final_coords = numpy.matmul(point_coords, self.window_scn)
return final_coords[0], final_coords[1]
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Window:
def __init__(self, world, xyw_min=None, xyw_max=None):
self.world = world
if xyw_min is None or xyw_max is None:
self.xyw_min = -100, -100
self.xyw_max = 100, 100
else:
if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:
raise Exception(
'O param xyw_min deve ser uma tupla de 2 valores.')
try:
self.xyw_min = float(xyw_min[0]), float(xyw_min[1])
except Exception:
raise Exception(
'As coordenadas xyw_min devem ser pares de números.')
if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:
raise Exception(
'O param xyw_max deve ser uma tupla de 2 valores.')
try:
self.xyw_max = float(xyw_max[0]), float(xyw_max[1])
except Exception:
raise Exception(
'As coordenadas xyw_max devem ser pares de números.')
self.xyw_1 = self.xyw_min
self.xyw_2 = self.xyw_max[0], self.xyw_min[1]
self.xyw_3 = self.xyw_min[0], self.xyw_max[1]
self.xyw_4 = self.xyw_max
self.center = self.calcCenter()
self.newCenter = self.center
self.fatorMovimento = 10
self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
self.degrees = 0
self.scn()
def set_xyw_min(self, xmin, ymin):
self.xyw_min = xmin, ymin
def set_xyw_max(self, xmax, ymax):
self.xyw_max = xmax, ymax
def calcCenter(self) ->(float, float):
return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] +
self.xyw_max[1]) / 2
def getCoords(self) ->(float, float, float, float):
return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[
1]
def getWindowDimensions(self) ->(float, float):
xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])
xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])
xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])
return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)
def moveUp(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * sin, dy=self.
fatorMovimento * cos)
def moveDown(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.
fatorMovimento * cos)
def moveRight(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.
fatorMovimento * sin)
def moveLeft(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * cos, dy=self.
fatorMovimento * sin)
def _translate(self, dx=0, dy=0):
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
translate_matrix)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def zoomIn(self):
self._scale(scale=0.9)
self.fatorMovimento = self.fatorMovimento * 0.9
def zoomOut(self):
self._scale(scale=1.1)
self.fatorMovimento = self.fatorMovimento * 1.1
def _scale(self, scale=1):
cx, cy = self.newCenter
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -
1 * cy, 1]])
scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])
translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
transformations = numpy.matmul(translate_matrix_1, scale_matrix)
transformations = numpy.matmul(transformations, translate_matrix_2)
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
transformations)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def rotateRight(self, angle):
self._rotate(360 - angle)
def rotateLeft(self, angle):
self._rotate(angle)
def _rotate(self, angle=0):
self.degrees = (self.degrees + angle) % 360
cx, cy = self.newCenter
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [
self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],
1], [self.xyw_4[0], self.xyw_4[1], 1]])
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -
1 * cy, 1]])
radians = numpy.radians(angle)
sin = numpy.sin(radians)
cos = numpy.cos(radians)
rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])
translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
transformations = numpy.matmul(translate_matrix_1, rotate_matrix)
transformations = numpy.matmul(transformations, translate_matrix_2)
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,
transformations)
self.xyw_1 = xyw_1[0], xyw_1[1]
self.xyw_2 = xyw_2[0], xyw_2[1]
self.xyw_3 = xyw_3[0], xyw_3[1]
self.xyw_4 = xyw_4[0], xyw_4[1]
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
self.newCenter = self.calcCenter()
self.scn()
def scn(self):
cx, cy = self.newCenter
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -
1 * cy, 1]])
radians = numpy.radians(-1 * self.degrees)
sin = numpy.sin(radians)
cos = numpy.cos(radians)
rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])
length, height = self.getWindowDimensions()
sx = 1 / (length / 2)
sy = 1 / (height / 2)
scale_matrix = numpy.array([[sx, 0, 0], [0, sy, 0], [0, 0, 1]])
scn = numpy.matmul(translate_matrix_1, rotate_matrix)
self.window_scn = numpy.matmul(scn, scale_matrix)
def applySCN(self, x, y):
point_coords = numpy.array([x, y, 1])
final_coords = numpy.matmul(point_coords, self.window_scn)
return final_coords[0], final_coords[1]
<|reserved_special_token_1|>
import numpy
from math import cos, sin, radians, tan
class Window:
    # constructor
def __init__(self, world, xyw_min=None, xyw_max=None):
self.world = world
        # case where it is None
if xyw_min is None or xyw_max is None:
self.xyw_min = (-100, -100)
self.xyw_max = (100, 100)
        # case where it is not None
else:
if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:
raise Exception('O param xyw_min deve ser uma tupla de 2 valores.')
try:
self.xyw_min = (float(xyw_min[0]), float(xyw_min[1]))
except Exception:
raise Exception('As coordenadas xyw_min devem ser pares de números.')
if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:
raise Exception('O param xyw_max deve ser uma tupla de 2 valores.')
try:
self.xyw_max = (float(xyw_max[0]), float(xyw_max[1]))
except Exception:
raise Exception('As coordenadas xyw_max devem ser pares de números.')
self.xyw_1 = self.xyw_min
self.xyw_2 = (self.xyw_max[0], self.xyw_min[1])
self.xyw_3 = (self.xyw_min[0], self.xyw_max[1])
self.xyw_4 = self.xyw_max
        # defines the window's original center (attribute that can be used to bring the view back to its original center)
self.center = self.calcCenter()
        # defines the new center (variable that can be used in future calculations involving the window center)
self.newCenter = self.center
self.fatorMovimento = 10
self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        # initializes the window's SCN
self.degrees = 0
self.scn()
def set_xyw_min(self, xmin, ymin):
self.xyw_min = (xmin, ymin)
def set_xyw_max(self, xmax, ymax):
self.xyw_max = (xmax, ymax)
    # returns the (x, y) coordinates of the window center
def calcCenter(self) -> (float, float):
return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] + self.xyw_max[1]) / 2
    # returns the coordinates of the window's bottom-left and top-right corners
def getCoords(self) -> (float, float, float, float):
return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[1]
    # returns the width and height of the window
def getWindowDimensions(self) -> (float, float):
xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])
xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])
xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])
return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)
    # translates the window up, from the user's point of view
def moveUp(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * sin, dy=self.fatorMovimento * cos)
    # translates the window down, from the user's point of view
def moveDown(self):
rad_angle = numpy.radians(self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=(-1) * self.fatorMovimento * sin, dy=(-1) * self.fatorMovimento * cos)
    # translates the window to the right, from the user's point of view
def moveRight(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=(-1) * self.fatorMovimento * cos, dy=(-1) * self.fatorMovimento * sin)
    # translates the window to the left, from the user's point of view
def moveLeft(self):
rad_angle = numpy.radians(180 - self.degrees)
sin = numpy.sin(rad_angle)
cos = numpy.cos(rad_angle)
self._translate(dx=self.fatorMovimento * cos, dy=self.fatorMovimento * sin)
    # performs the window translation
def _translate(self, dx=0, dy=0):
        # builds the matrix of window corner coordinates to be translated by an arbitrary dx and dy
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],
[self.xyw_2[0], self.xyw_2[1], 1],
[self.xyw_3[0], self.xyw_3[1], 1],
[self.xyw_4[0], self.xyw_4[1], 1]])
        # performs the translation
translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])
        # updates the window
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, translate_matrix)
self.xyw_1 = (xyw_1[0], xyw_1[1])
self.xyw_2 = (xyw_2[0], xyw_2[1])
self.xyw_3 = (xyw_3[0], xyw_3[1])
self.xyw_4 = (xyw_4[0], xyw_4[1])
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
        # updates the center
self.newCenter = self.calcCenter()
        # updates the SCN
self.scn()
    # Shrinks the window
def zoomIn(self):
self._scale(scale=0.9)
self.fatorMovimento = self.fatorMovimento * 0.9
    # Enlarges the window
def zoomOut(self):
self._scale(scale=1.1)
self.fatorMovimento = self.fatorMovimento * 1.1
    # Scales the window
def _scale(self, scale=1):
        # object center
cx, cy = self.newCenter
        # world coords
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],
[self.xyw_2[0], self.xyw_2[1], 1],
[self.xyw_3[0], self.xyw_3[1], 1],
[self.xyw_4[0], self.xyw_4[1], 1]])
        # aligns the world center with the object
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])
        # performs the scaling (not sure if that is the correct term)
scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])
        # reverts the alignment of the world center with the object
translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
        # assembles a matrix that applies all the transformations
transformations = numpy.matmul(translate_matrix_1, scale_matrix)
transformations = numpy.matmul(transformations, translate_matrix_2)
        # applies the transformations
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)
        # updates xyw_min/max
self.xyw_1 = (xyw_1[0], xyw_1[1])
self.xyw_2 = (xyw_2[0], xyw_2[1])
self.xyw_3 = (xyw_3[0], xyw_3[1])
self.xyw_4 = (xyw_4[0], xyw_4[1])
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
        # updates the center
self.newCenter = self.calcCenter()
        # updates the SCN
self.scn()
    # Rotates the window clockwise
def rotateRight(self, angle):
# 360 - 10 = 350
self._rotate(360 - angle)
    # Rotates the window counterclockwise
def rotateLeft(self, angle):
self._rotate(angle)
    # Rotates the window around its own center
def _rotate(self, angle=0):
self.degrees = (self.degrees + angle) % 360
        # object center
cx, cy = self.newCenter
        # world coords
window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],
[self.xyw_2[0], self.xyw_2[1], 1],
[self.xyw_3[0], self.xyw_3[1], 1],
[self.xyw_4[0], self.xyw_4[1], 1]])
        # aligns the world center with the object
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])
        # performs the rotation
radians = numpy.radians(angle)
sin = numpy.sin(radians)
cos = numpy.cos(radians)
rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])
        # reverts the transformation that was applied
translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])
        # generates the rotation transformation matrix
transformations = numpy.matmul(translate_matrix_1, rotate_matrix)
transformations = numpy.matmul(transformations, translate_matrix_2)
        # applies the transformations
xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)
        # updates xyw_min/max
self.xyw_1 = (xyw_1[0], xyw_1[1])
self.xyw_2 = (xyw_2[0], xyw_2[1])
self.xyw_3 = (xyw_3[0], xyw_3[1])
self.xyw_4 = (xyw_4[0], xyw_4[1])
self.xyw_min = self.xyw_1
self.xyw_max = self.xyw_4
        # updates the center
self.newCenter = self.calcCenter()
        # updates the SCN
self.scn()
    # Computes the window's coordinate-system transformation (SCN) matrix
def scn(self):
        # object center
cx, cy = self.newCenter
        # aligns the world center with the object
translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])
        # takes the INVERSE of the window's current rotation
radians = numpy.radians((-1) * self.degrees)
sin = numpy.sin(radians)
cos = numpy.cos(radians)
        # rotates
rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])
length, height = self.getWindowDimensions()
sx = 1 / (length / 2)
sy = 1 / (height / 2)
        # performs the scaling (not sure if that is the correct term)
scale_matrix = numpy.array([[sx, 0, 0], [0, sy, 0], [0, 0, 1]])
        # generates the window's SCN conversion matrix
scn = numpy.matmul(translate_matrix_1, rotate_matrix)
self.window_scn = numpy.matmul(scn, scale_matrix)
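    # Convention used throughout this class: points are kept as row vectors [x, y, 1], so a
    # transform is applied as p' = p . M and matrices compose left to right. The SCN matrix
    # built above is T(-cx, -cy) . R(-degrees) . S(2/length, 2/height), which maps the window
    # center to (0, 0) and the window borders to -1/+1.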
    # Applies the window's coordinate-system transformation matrix to an arbitrary point
def applySCN(self, x, y):
point_coords = numpy.array([x, y, 1])
final_coords = numpy.matmul(point_coords, self.window_scn)
return final_coords[0], final_coords[1]
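# Minimal usage sketch (illustrative only, not part of the original module); the `world`
# argument is stored but never used by the class itself, so None suffices here.
if __name__ == '__main__':
    win = Window(None, xyw_min=(-100, -100), xyw_max=(100, 100))
    win.moveRight()        # pan 10 units to the right (fatorMovimento defaults to 10)
    win.zoomIn()           # shrink the window to 90% of its size
    win.rotateLeft(45)     # rotate the view 45 degrees counterclockwise
    # map a world point into the window's normalized coordinate system
    print(win.applySCN(10, 20))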
|
flexible
|
{
"blob_id": "deb0cd745eae97a6dbabdfab37e1c6d75e5372f0",
"index": 8422,
"step-1": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n <mask token>\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n <mask token>\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n <mask token>\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n <mask 
token>\n <mask token>\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-2": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n <mask token>\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n <mask token>\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n\n def _translate(self, dx=0, dy=0):\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n translate_matrix)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 
1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n <mask token>\n <mask token>\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-3": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n\n def calcCenter(self) ->(float, float):\n return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] +\n self.xyw_max[1]) / 2\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n\n def getWindowDimensions(self) ->(float, float):\n xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])\n xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])\n xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])\n return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n\n def _translate(self, dx=0, dy=0):\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n translate_matrix)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords 
= numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n <mask token>\n <mask token>\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-4": "<mask token>\n\n\nclass Window:\n\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n if xyw_min is None or xyw_max is None:\n self.xyw_min = -100, -100\n self.xyw_max = 100, 100\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception(\n 'O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = float(xyw_min[0]), float(xyw_min[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_min devem ser pares de números.')\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception(\n 'O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = float(xyw_max[0]), float(xyw_max[1])\n except Exception:\n raise Exception(\n 'As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = self.xyw_max[0], self.xyw_min[1]\n self.xyw_3 = self.xyw_min[0], self.xyw_max[1]\n self.xyw_4 = self.xyw_max\n self.center = self.calcCenter()\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = xmin, ymin\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = xmax, ymax\n\n def calcCenter(self) ->(float, float):\n return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] +\n self.xyw_max[1]) / 2\n\n def getCoords(self) ->(float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[\n 1]\n\n def getWindowDimensions(self) ->(float, float):\n xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])\n xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])\n xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])\n return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)\n\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.\n fatorMovimento * cos)\n\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * sin, dy=-1 * self.\n fatorMovimento * cos)\n\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=-1 * self.fatorMovimento * cos, dy=-1 * self.\n fatorMovimento * sin)\n\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.\n fatorMovimento * sin)\n\n def _translate(self, dx=0, dy=0):\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n translate_matrix)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n def _scale(self, scale=1):\n cx, cy = self.newCenter\n window_coords 
= numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def rotateRight(self, angle):\n self._rotate(360 - angle)\n\n def rotateLeft(self, angle):\n self._rotate(angle)\n\n def _rotate(self, angle=0):\n self.degrees = (self.degrees + angle) % 360\n cx, cy = self.newCenter\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1], [\n self.xyw_2[0], self.xyw_2[1], 1], [self.xyw_3[0], self.xyw_3[1],\n 1], [self.xyw_4[0], self.xyw_4[1], 1]])\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n radians = numpy.radians(angle)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n transformations = numpy.matmul(translate_matrix_1, rotate_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords,\n transformations)\n self.xyw_1 = xyw_1[0], xyw_1[1]\n self.xyw_2 = xyw_2[0], xyw_2[1]\n self.xyw_3 = xyw_3[0], xyw_3[1]\n self.xyw_4 = xyw_4[0], xyw_4[1]\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n self.newCenter = self.calcCenter()\n self.scn()\n\n def scn(self):\n cx, cy = self.newCenter\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [-1 * cx, -\n 1 * cy, 1]])\n radians = numpy.radians(-1 * self.degrees)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n length, height = self.getWindowDimensions()\n sx = 1 / (length / 2)\n sy = 1 / (height / 2)\n scale_matrix = numpy.array([[sx, 0, 0], [0, sy, 0], [0, 0, 1]])\n scn = numpy.matmul(translate_matrix_1, rotate_matrix)\n self.window_scn = numpy.matmul(scn, scale_matrix)\n\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-5": "import numpy\nfrom math import cos, sin, radians, tan\n\nclass Window:\n # construtor\n def __init__(self, world, xyw_min=None, xyw_max=None):\n self.world = world\n # caso em q é None\n if xyw_min is None or xyw_max is None:\n self.xyw_min = (-100, -100)\n self.xyw_max = (100, 100)\n # caso em q n é None\n else:\n if not isinstance(xyw_min, tuple) or len(xyw_min) != 2:\n raise Exception('O param xyw_min deve ser uma tupla de 2 valores.')\n try:\n self.xyw_min = (float(xyw_min[0]), float(xyw_min[1]))\n except Exception:\n raise Exception('As coordenadas xyw_min devem ser pares de números.')\n\n if not isinstance(xyw_max, tuple) or len(xyw_max) != 2:\n raise Exception('O param xyw_max deve ser uma tupla de 2 valores.')\n try:\n self.xyw_max = (float(xyw_max[0]), float(xyw_max[1]))\n except Exception:\n raise Exception('As coordenadas xyw_max devem ser pares de números.')\n self.xyw_1 = self.xyw_min\n self.xyw_2 = (self.xyw_max[0], self.xyw_min[1])\n self.xyw_3 = (self.xyw_min[0], self.xyw_max[1])\n self.xyw_4 = self.xyw_max\n # define o centro original da window(attr q pode ser usado para trazer a view de volta ao seu centro original)\n self.center = self.calcCenter()\n # define o novo centro(var que pode ser utilizada em futuros calculos envolvendo o centro da window)\n self.newCenter = self.center\n self.fatorMovimento = 10\n self.window_scn = numpy.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n # inicializa scn da window\n self.degrees = 0\n self.scn()\n\n def set_xyw_min(self, xmin, ymin):\n self.xyw_min = (xmin, ymin)\n\n def set_xyw_max(self, xmax, ymax):\n self.xyw_max = (xmax, ymax)\n\n # retorna as coordenadas (x,y) do centro da window\n def calcCenter(self) -> (float, float):\n return (self.xyw_min[0] + self.xyw_max[0]) / 2, (self.xyw_min[1] + self.xyw_max[1]) / 2\n\n # retorna as coordenadas do canto inferior esquerdo e canto superior direito da window\n def getCoords(self) -> (float, float, float, float):\n return self.xyw_min[0], self.xyw_min[1], self.xyw_max[0], self.xyw_max[1]\n\n # retorna a largura e profundidade da window\n def getWindowDimensions(self) -> (float, float):\n xyw1 = numpy.array([self.xyw_1[0], self.xyw_1[1]])\n xyw2 = numpy.array([self.xyw_2[0], self.xyw_2[1]])\n xyw3 = numpy.array([self.xyw_3[0], self.xyw_3[1]])\n return numpy.linalg.norm(xyw2 - xyw1), numpy.linalg.norm(xyw3 - xyw1)\n\n # translada a window para cima, do ponto de vista do usuario\n def moveUp(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * sin, dy=self.fatorMovimento * cos)\n\n # translada a window para baixo, do ponto de vista do usuario\n def moveDown(self):\n rad_angle = numpy.radians(self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=(-1) * self.fatorMovimento * sin, dy=(-1) * self.fatorMovimento * cos)\n\n # translada a window para direita, do ponto de vista do usuario\n def moveRight(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=(-1) * self.fatorMovimento * cos, dy=(-1) * self.fatorMovimento * sin)\n\n # translada a window para esquerda, do ponto de vista do usuario\n def moveLeft(self):\n rad_angle = numpy.radians(180 - self.degrees)\n sin = numpy.sin(rad_angle)\n cos = numpy.cos(rad_angle)\n self._translate(dx=self.fatorMovimento * cos, dy=self.fatorMovimento * sin)\n\n # realiza a translaçao da window\n def _translate(self, dx=0, dy=0):\n # cria a 
matriz de translacao do obj para um dx e dy qualquer\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],\n [self.xyw_2[0], self.xyw_2[1], 1],\n [self.xyw_3[0], self.xyw_3[1], 1],\n [self.xyw_4[0], self.xyw_4[1], 1]])\n # realiza a translacao\n translate_matrix = numpy.array([[1, 0, 0], [0, 1, 0], [dx, dy, 1]])\n # atualiza a window\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, translate_matrix)\n self.xyw_1 = (xyw_1[0], xyw_1[1])\n self.xyw_2 = (xyw_2[0], xyw_2[1])\n self.xyw_3 = (xyw_3[0], xyw_3[1])\n self.xyw_4 = (xyw_4[0], xyw_4[1])\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n # atualiza o centro\n self.newCenter = self.calcCenter()\n # atualiza scn\n self.scn()\n\n # Encolhe a window\n def zoomIn(self):\n self._scale(scale=0.9)\n self.fatorMovimento = self.fatorMovimento * 0.9\n\n # Aumenta a window\n def zoomOut(self):\n self._scale(scale=1.1)\n self.fatorMovimento = self.fatorMovimento * 1.1\n\n # Escalona a window\n def _scale(self, scale=1):\n # centro do obj\n cx, cy = self.newCenter\n # coords do mundo\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],\n [self.xyw_2[0], self.xyw_2[1], 1],\n [self.xyw_3[0], self.xyw_3[1], 1],\n [self.xyw_4[0], self.xyw_4[1], 1]])\n # ajusta o centro do mundo com o obj\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])\n # realiza o escalonamento(num sei se esse e o termo correto)\n scale_matrix = numpy.array([[scale, 0, 0], [0, scale, 0], [0, 0, 1]])\n # reverte o ajuste do centro do mundo com o obj\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n # monta uma matriz que aplica todas as transformacoes\n transformations = numpy.matmul(translate_matrix_1, scale_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n # aplica as transformacoes\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)\n # atualiza xyw_min/max\n self.xyw_1 = (xyw_1[0], xyw_1[1])\n self.xyw_2 = (xyw_2[0], xyw_2[1])\n self.xyw_3 = (xyw_3[0], xyw_3[1])\n self.xyw_4 = (xyw_4[0], xyw_4[1])\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n # atualiza o centro\n self.newCenter = self.calcCenter()\n # atualiza scn\n self.scn()\n\n # Rotaciona a window no sentido horario\n def rotateRight(self, angle):\n # 360 - 10 = 350\n self._rotate(360 - angle)\n\n # Rotaciona a window no sentido anti-horario\n def rotateLeft(self, angle):\n self._rotate(angle)\n\n # Rotaciona a window em relaçao ao seu proprio centro\n def _rotate(self, angle=0):\n self.degrees = (self.degrees + angle) % 360\n # centro do obj\n cx, cy = self.newCenter\n # coords do mundo\n window_coords = numpy.array([[self.xyw_1[0], self.xyw_1[1], 1],\n [self.xyw_2[0], self.xyw_2[1], 1],\n [self.xyw_3[0], self.xyw_3[1], 1],\n [self.xyw_4[0], self.xyw_4[1], 1]])\n # ajusta o centro do mundo com o obj\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])\n # realiza a rotacao\n radians = numpy.radians(angle)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n # reverte a transformacao feita\n translate_matrix_2 = numpy.array([[1, 0, 0], [0, 1, 0], [cx, cy, 1]])\n # gera a matriz de transformacao de rotacao\n transformations = numpy.matmul(translate_matrix_1, rotate_matrix)\n transformations = numpy.matmul(transformations, translate_matrix_2)\n # aplica as transformacoes\n xyw_1, xyw_2, xyw_3, xyw_4 = numpy.matmul(window_coords, transformations)\n # 
atualiza xyw_min/max\n self.xyw_1 = (xyw_1[0], xyw_1[1])\n self.xyw_2 = (xyw_2[0], xyw_2[1])\n self.xyw_3 = (xyw_3[0], xyw_3[1])\n self.xyw_4 = (xyw_4[0], xyw_4[1])\n self.xyw_min = self.xyw_1\n self.xyw_max = self.xyw_4\n # atualiza o centro\n self.newCenter = self.calcCenter()\n # atualiza scn\n self.scn()\n\n # Calcula a matriz de transformaçao de sistemas de coordenadas da window\n def scn(self):\n # centro do obj\n cx, cy = self.newCenter\n # ajusta o centro do mundo com o obj\n translate_matrix_1 = numpy.array([[1, 0, 0], [0, 1, 0], [(-1) * cx, (-1) * cy, 1]])\n # pega ao INVERSO da rotacao atual da window\n radians = numpy.radians((-1) * self.degrees)\n sin = numpy.sin(radians)\n cos = numpy.cos(radians)\n # rotaciona\n rotate_matrix = numpy.array([[cos, -sin, 0], [sin, cos, 0], [0, 0, 1]])\n length, height = self.getWindowDimensions()\n sx = 1 / (length / 2)\n sy = 1 / (height / 2)\n # realiza o escalonamento(num sei se esse e o termo correto)\n scale_matrix = numpy.array([[sx, 0, 0], [0, sy, 0], [0, 0, 1]])\n # gera a matriz de conversao para scn da window\n scn = numpy.matmul(translate_matrix_1, rotate_matrix)\n self.window_scn = numpy.matmul(scn, scale_matrix)\n\n # Aplica a matriz de transformaçao de sistema de coordenadas da window a um ponto qualquer\n def applySCN(self, x, y):\n point_coords = numpy.array([x, y, 1])\n final_coords = numpy.matmul(point_coords, self.window_scn)\n return final_coords[0], final_coords[1]\n",
"step-ids": [
15,
16,
18,
20,
22
]
}
|
[
15,
16,
18,
20,
22
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(type(df))
print('\n')
print(df)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
dict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [7, 8, 9], 'c3': [10,
11, 12], 'c4': [13, 14, 15]}
df = pd.DataFrame(dict_data)
print(type(df))
print('\n')
print(df)
<|reserved_special_token_1|>
import pandas as pd
dict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [7, 8, 9], 'c3': [10,
11, 12], 'c4': [13, 14, 15]}
df = pd.DataFrame(dict_data)
print(type(df))
print('\n')
print(df)
<|reserved_special_token_1|>
import pandas as pd
dict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [
7, 8, 9], 'c3': [10, 11, 12], 'c4': [13, 14, 15]}
df = pd.DataFrame(dict_data)
print(type(df))
print('\n')
print(df)
# <class 'pandas.core.frame.DataFrame'>
# c0 c1 c2 c3 c4
# 0 1 4 7 10 13
# 1 2 5 8 11 14
# 2 3 6 9 12 15
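# A small illustrative extension (not in the original): the same dict with an explicit
# row index, then a single value selected by label.
df2 = pd.DataFrame(dict_data, index=['r0', 'r1', 'r2'])
print(df2.loc['r1', 'c2'])  # 8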
|
flexible
|
{
"blob_id": "22f4ae755e7ea43604db39452ca80f44f540708a",
"index": 9503,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(type(df))\nprint('\\n')\nprint(df)\n",
"step-3": "<mask token>\ndict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [7, 8, 9], 'c3': [10, \n 11, 12], 'c4': [13, 14, 15]}\ndf = pd.DataFrame(dict_data)\nprint(type(df))\nprint('\\n')\nprint(df)\n",
"step-4": "import pandas as pd\ndict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [7, 8, 9], 'c3': [10, \n 11, 12], 'c4': [13, 14, 15]}\ndf = pd.DataFrame(dict_data)\nprint(type(df))\nprint('\\n')\nprint(df)\n",
"step-5": "import pandas as pd\n\ndict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [\n 7, 8, 9], 'c3': [10, 11, 12], 'c4': [13, 14, 15]}\n\ndf = pd.DataFrame(dict_data)\n\nprint(type(df))\nprint('\\n')\nprint(df)\n\n# <class 'pandas.core.frame.DataFrame'>\n\n\n# c0 c1 c2 c3 c4\n# 0 1 4 7 10 13\n# 1 2 5 8 11 14\n# 2 3 6 9 12 15\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
def describe():
desc = """
Problem : Given a string, find the length of the longest substring in it with no more than K distinct characters.
For example :
Input: String="araaci", K=2
Output: 4
Explanation: The longest substring with no more than '2' distinct characters is "araa", where the distinct chars are 'a' and 'r'.
-----------
"""
print(desc)
# Time complexity is O(n)
def find_substr_with_distinct_chars(str, k):
if len(str) == 0 or k <= 0:
return ""
maxSubstr = "" # Maintains final result
charMap = {}
head = 0 # Pointer of the start of the sliding window
# tail is the end pointer of the sliding window
for tail in range(0, len(str)):
tailChar = str[tail]
if tailChar not in charMap:
charMap[tailChar] = 0
charMap[tailChar] += 1
while len(charMap) > k:
headChar = str[head]
charMap[headChar] -= 1
if charMap[headChar] == 0:
del charMap[headChar]
head += 1
substr = str[head:tail+1]
if len(substr) > len(maxSubstr):
maxSubstr = substr
return maxSubstr
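# Worked trace of the sliding window on "araaci" with K=2 (matches the example above):
#   tail 0..3 grows the window to "araa" ({'a', 'r'} = 2 distinct), the longest seen.
#   tail 4 ('c') gives 3 distinct chars, so head advances past 'r', leaving "aac".
#   tail 5 ('i') again gives 3 distinct, and head advances until the window is "ci".
# The function therefore returns "araa", whose length is 4.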
def main():
describe()
str = "araaci"
k = 2
res = find_substr_with_distinct_chars(str, k)
print("Input", str, k)
print("Longest substring with k distinct chars is : ", res)
print("Length of longest such substring is : ", len(res))
main()
|
normal
|
{
"blob_id": "1a730f4a5fa2be434af41a3e320cab8338d93644",
"index": 5050,
"step-1": "<mask token>\n\n\ndef main():\n describe()\n str = 'araaci'\n k = 2\n res = find_substr_with_distinct_chars(str, k)\n print('Input', str, k)\n print('Longest substring with k distinct chars is : ', res)\n print('Length of longest such substring is : ', len(res))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef find_substr_with_distinct_chars(str, k):\n if len(str) == 0 or k <= 0:\n return ''\n maxSubstr = ''\n charMap = {}\n head = 0\n for tail in range(0, len(str)):\n tailChar = str[tail]\n if tailChar not in charMap:\n charMap[tailChar] = 0\n charMap[tailChar] += 1\n while len(charMap) > k:\n headChar = str[head]\n charMap[headChar] -= 1\n if charMap[headChar] == 0:\n del charMap[headChar]\n head += 1\n substr = str[head:tail + 1]\n if len(substr) > len(maxSubstr):\n maxSubstr = substr\n return maxSubstr\n\n\ndef main():\n describe()\n str = 'araaci'\n k = 2\n res = find_substr_with_distinct_chars(str, k)\n print('Input', str, k)\n print('Longest substring with k distinct chars is : ', res)\n print('Length of longest such substring is : ', len(res))\n\n\n<mask token>\n",
"step-3": "def describe():\n desc = \"\"\"\nProblem : Given a string, find the length of the longest substring in it with no more than K distinct characters.\nFor example :\n Input: String=\"araaci\", K=2\n Output: 4\n Explanation: The longest substring with no more than '2' distinct characters is \"araa\", where the distinct chars are 'a' and 'r'.\n\n-----------\n \"\"\"\n print(desc)\n\n\ndef find_substr_with_distinct_chars(str, k):\n if len(str) == 0 or k <= 0:\n return ''\n maxSubstr = ''\n charMap = {}\n head = 0\n for tail in range(0, len(str)):\n tailChar = str[tail]\n if tailChar not in charMap:\n charMap[tailChar] = 0\n charMap[tailChar] += 1\n while len(charMap) > k:\n headChar = str[head]\n charMap[headChar] -= 1\n if charMap[headChar] == 0:\n del charMap[headChar]\n head += 1\n substr = str[head:tail + 1]\n if len(substr) > len(maxSubstr):\n maxSubstr = substr\n return maxSubstr\n\n\ndef main():\n describe()\n str = 'araaci'\n k = 2\n res = find_substr_with_distinct_chars(str, k)\n print('Input', str, k)\n print('Longest substring with k distinct chars is : ', res)\n print('Length of longest such substring is : ', len(res))\n\n\n<mask token>\n",
"step-4": "def describe():\n desc = \"\"\"\nProblem : Given a string, find the length of the longest substring in it with no more than K distinct characters.\nFor example :\n Input: String=\"araaci\", K=2\n Output: 4\n Explanation: The longest substring with no more than '2' distinct characters is \"araa\", where the distinct chars are 'a' and 'r'.\n\n-----------\n \"\"\"\n print(desc)\n\n\ndef find_substr_with_distinct_chars(str, k):\n if len(str) == 0 or k <= 0:\n return ''\n maxSubstr = ''\n charMap = {}\n head = 0\n for tail in range(0, len(str)):\n tailChar = str[tail]\n if tailChar not in charMap:\n charMap[tailChar] = 0\n charMap[tailChar] += 1\n while len(charMap) > k:\n headChar = str[head]\n charMap[headChar] -= 1\n if charMap[headChar] == 0:\n del charMap[headChar]\n head += 1\n substr = str[head:tail + 1]\n if len(substr) > len(maxSubstr):\n maxSubstr = substr\n return maxSubstr\n\n\ndef main():\n describe()\n str = 'araaci'\n k = 2\n res = find_substr_with_distinct_chars(str, k)\n print('Input', str, k)\n print('Longest substring with k distinct chars is : ', res)\n print('Length of longest such substring is : ', len(res))\n\n\nmain()\n",
"step-5": "def describe():\n desc = \"\"\"\nProblem : Given a string, find the length of the longest substring in it with no more than K distinct characters.\nFor example :\n Input: String=\"araaci\", K=2\n Output: 4\n Explanation: The longest substring with no more than '2' distinct characters is \"araa\", where the distinct chars are 'a' and 'r'.\n\n-----------\n \"\"\"\n print(desc)\n\n\n# Time complexity is O(n)\ndef find_substr_with_distinct_chars(str, k):\n if len(str) == 0 or k <= 0:\n return \"\"\n maxSubstr = \"\" # Maintains final result\n charMap = {}\n head = 0 # Pointer of the start of the sliding window\n # tail is the end pointer of the sliding window\n for tail in range(0, len(str)):\n tailChar = str[tail]\n if tailChar not in charMap:\n charMap[tailChar] = 0\n charMap[tailChar] += 1\n\n while len(charMap) > k:\n headChar = str[head]\n charMap[headChar] -= 1\n if charMap[headChar] == 0:\n del charMap[headChar]\n head += 1\n\n substr = str[head:tail+1]\n if len(substr) > len(maxSubstr):\n maxSubstr = substr\n\n return maxSubstr\n\n\ndef main():\n describe()\n str = \"araaci\"\n k = 2\n res = find_substr_with_distinct_chars(str, k)\n print(\"Input\", str, k)\n print(\"Longest substring with k distinct chars is : \", res)\n print(\"Length of longest such substring is : \", len(res))\n\nmain()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""
This is the hourly animation program. It displays a series of images across the board.
It is hard coded to work with the Sonic images. Adjustments would need to be made to
the y values which are distance traveled. Change sonicFrame < 8 value to the total
number of frames the new animation has.
"""
from runImages import *
def animationDisplay():
matrix.Clear()
sonicRun = 0
sonicFrame = 0
y = 0
while y < 70:
sonicFrame = 0
if sonicRun >= 100:
sonicRun = 0
y = y + 15
while sonicFrame < 8:
animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'
imageDisplay(animationFrame, sonicRun, y)
time.sleep(0.05)
sonicRun = sonicRun + 6
sonicFrame = sonicFrame + 1
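# Illustrative scheduling sketch (an assumption, not part of the original file): one simple
# way to trigger the animation once an hour from a driver loop. `time` is reachable here the
# same way animationDisplay above reaches it, via the star import from runImages.
def runHourly():
    while True:
        animationDisplay()
        time.sleep(3600)  # wait one hour between runs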
|
normal
|
{
"blob_id": "ede675c971ed233e93c14aa4d2ffb66fe7ba775a",
"index": 5613,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef animationDisplay():\n matrix.Clear()\n sonicRun = 0\n sonicFrame = 0\n y = 0\n while y < 70:\n sonicFrame = 0\n if sonicRun >= 100:\n sonicRun = 0\n y = y + 15\n while sonicFrame < 8:\n animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'\n imageDisplay(animationFrame, sonicRun, y)\n time.sleep(0.05)\n sonicRun = sonicRun + 6\n sonicFrame = sonicFrame + 1\n",
"step-3": "<mask token>\nfrom runImages import *\n\n\ndef animationDisplay():\n matrix.Clear()\n sonicRun = 0\n sonicFrame = 0\n y = 0\n while y < 70:\n sonicFrame = 0\n if sonicRun >= 100:\n sonicRun = 0\n y = y + 15\n while sonicFrame < 8:\n animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'\n imageDisplay(animationFrame, sonicRun, y)\n time.sleep(0.05)\n sonicRun = sonicRun + 6\n sonicFrame = sonicFrame + 1\n",
"step-4": "\"\"\"\nThis is the hourly animation program. It displays a series of images across the board.\nIt is hard coded to work with the Sonic images. Adjustments would need to be made to\nthe y values which are distance traveled. Change sonicFrame < 8 value to the total\nnumber of frames the new animation has.\n\"\"\"\nfrom runImages import *\n\ndef animationDisplay():\n matrix.Clear()\n sonicRun = 0\n sonicFrame = 0\n y = 0\n while y < 70:\n sonicFrame = 0\n if sonicRun >= 100:\n sonicRun = 0\n y = y + 15\n while sonicFrame < 8:\n animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'\n imageDisplay(animationFrame, sonicRun, y)\n time.sleep(0.05)\n sonicRun = sonicRun + 6\n sonicFrame = sonicFrame + 1\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from django.shortcuts import render,get_object_or_404, redirect
from django.contrib import admin # needed if the admin site is going to be used
from .models import Blog # imports the Blog model from this app
from django.utils import timezone
admin.site.register(Blog) # fetch the Blog model and register it with the admin
# Create your views here.
def home(request):
blogs = Blog.objects
return render(request,'home.html',{'blogs':blogs})
def detail(request,blog_id):
blog_detail= get_object_or_404(Blog,pk=blog_id)
return render(request,'detail.html',{'blog': blog_detail})
def new(request):
return render(request,'new.html')
def create(request):
blog=Blog()
blog.title=request.GET['title']
blog.body=request.GET['body']
blog.pub_date=timezone.datetime.now()
blog.save()
return redirect('/blog/'+str(blog.id))
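# Illustrative URLconf sketch (an assumption; this would live in urls.py, not here): routes
# that match how these views are wired together - create() redirects to '/blog/<id>', which
# detail() then serves.
#
# from django.urls import path
# from . import views
#
# urlpatterns = [
#     path('', views.home, name='home'),
#     path('blog/<int:blog_id>', views.detail, name='detail'),
#     path('blog/new/', views.new, name='new'),
#     path('blog/create/', views.create, name='create'),
# ]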
|
normal
|
{
"blob_id": "bc25338612f525f616fb26c64d8b36667d297d40",
"index": 3921,
"step-1": "<mask token>\n\n\ndef home(request):\n blogs = Blog.objects\n return render(request, 'home.html', {'blogs': blogs})\n\n\ndef detail(request, blog_id):\n blog_detail = get_object_or_404(Blog, pk=blog_id)\n return render(request, 'detail.html', {'blog': blog_detail})\n\n\n<mask token>\n\n\ndef create(request):\n blog = Blog()\n blog.title = request.GET['title']\n blog.body = request.GET['body']\n blog.pub_date = timezone.datetime.now()\n blog.save()\n return redirect('/blog/' + str(blog.id))\n",
"step-2": "<mask token>\n\n\ndef home(request):\n blogs = Blog.objects\n return render(request, 'home.html', {'blogs': blogs})\n\n\ndef detail(request, blog_id):\n blog_detail = get_object_or_404(Blog, pk=blog_id)\n return render(request, 'detail.html', {'blog': blog_detail})\n\n\ndef new(request):\n return render(request, 'new.html')\n\n\ndef create(request):\n blog = Blog()\n blog.title = request.GET['title']\n blog.body = request.GET['body']\n blog.pub_date = timezone.datetime.now()\n blog.save()\n return redirect('/blog/' + str(blog.id))\n",
"step-3": "<mask token>\nadmin.site.register(Blog)\n\n\ndef home(request):\n blogs = Blog.objects\n return render(request, 'home.html', {'blogs': blogs})\n\n\ndef detail(request, blog_id):\n blog_detail = get_object_or_404(Blog, pk=blog_id)\n return render(request, 'detail.html', {'blog': blog_detail})\n\n\ndef new(request):\n return render(request, 'new.html')\n\n\ndef create(request):\n blog = Blog()\n blog.title = request.GET['title']\n blog.body = request.GET['body']\n blog.pub_date = timezone.datetime.now()\n blog.save()\n return redirect('/blog/' + str(blog.id))\n",
"step-4": "from django.shortcuts import render, get_object_or_404, redirect\nfrom django.contrib import admin\nfrom .models import Blog\nfrom django.utils import timezone\nadmin.site.register(Blog)\n\n\ndef home(request):\n blogs = Blog.objects\n return render(request, 'home.html', {'blogs': blogs})\n\n\ndef detail(request, blog_id):\n blog_detail = get_object_or_404(Blog, pk=blog_id)\n return render(request, 'detail.html', {'blog': blog_detail})\n\n\ndef new(request):\n return render(request, 'new.html')\n\n\ndef create(request):\n blog = Blog()\n blog.title = request.GET['title']\n blog.body = request.GET['body']\n blog.pub_date = timezone.datetime.now()\n blog.save()\n return redirect('/blog/' + str(blog.id))\n",
"step-5": "from django.shortcuts import render,get_object_or_404, redirect\nfrom django.contrib import admin #어드민 쓸꺼면 써야됨\nfrom .models import Blog #앱을 가지고 오겠다는거\nfrom django.utils import timezone\n\nadmin.site.register(Blog) #블로그 형식을 가져와 등록하겠다.\n# Create your views here.\ndef home(request):\n blogs = Blog.objects\n return render(request,'home.html',{'blogs':blogs})\n\ndef detail(request,blog_id):\n blog_detail= get_object_or_404(Blog,pk=blog_id)\n return render(request,'detail.html',{'blog': blog_detail})\n\ndef new(request):\n return render(request,'new.html')\n\ndef create(request):\n blog=Blog()\n blog.title=request.GET['title']\n blog.body=request.GET['body']\n blog.pub_date=timezone.datetime.now()\n blog.save()\n return redirect('/blog/'+str(blog.id))",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
def clusterVacio():
arreAux = []
busca = 1
bandera = True
for i in range(len(clusters)):
clu = clusters[i]
arreAux.append(int(clu[0]))
print(arreAux)
while bandera:
if busca in arreAux:
busca = busca + 1
else:
bandera = False
return busca
def tablaArchivos():
global archivos
global tams
global clusters
archivos = []
tams = []
clusters = []
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
archivos.append(file.read(15))
tams.append(file.read(8))
clusters.append(file.read(5))
file.seek(file.tell() + 36)
file.close()
def info():
print('Nombre del Sistema: ' + nombre)
print('Version: ' + version)
print('Etiqueta del Volumen: ' + etiqueta)
print('Tamano del cluster en bytes: ' + cluster)
print('Numero de clusters que mide el directorio: ' + numero)
print('Numero de cluster que mide la unidad completa: ' + numeroCompleto)
def listar():
file = open('fiunamfs.img', 'r')
file.seek(2048)
for i in range(64):
name = file.read(15)
if name != 'Xx.xXx.xXx.xXx.':
print(name)
file.seek(file.tell() + 49)
file.close()
def borrar(archivo):
borrado = False
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == archivo:
file.seek(file.tell() - 15)
file.write('Xx.xXx.xXx.xXx.')
borrado = True
file.seek(file.tell() + 49)
file.close()
return borrado
def tamaArchivo(path):
si = stat(path).st_size
return si
<|reserved_special_token_0|>
def deSistemaAPc(archivo, nombre):
tam = 0
clu = 0
file = open('fiunamfs.img', 'r')
file.seek(2048)
new = open(archivo, 'r+')
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == nombre:
tam = file.read(8)
clu = file.read(5)
file.close()
    aux2 = 2048 * int(clu)
    file = open('fiunamfs.img', 'r')
    file.seek(aux2)
    new.write(file.read(int(tam)))
def nombreArchivo(path):
tam = len(path)
slash = 0
name = ''
name2 = ''
for i in range(tam):
if path[i] == '/':
slash = i
for i in range(slash + 1, tam):
name = name + path[i]
espaces = 15 - len(name)
for i in range(espaces):
name2 = name2 + ' '
return name2 + name
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def clusterVacio():
arreAux = []
busca = 1
bandera = True
for i in range(len(clusters)):
clu = clusters[i]
arreAux.append(int(clu[0]))
print(arreAux)
while bandera:
if busca in arreAux:
busca = busca + 1
else:
bandera = False
return busca
def tablaArchivos():
global archivos
global tams
global clusters
archivos = []
tams = []
clusters = []
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
archivos.append(file.read(15))
tams.append(file.read(8))
clusters.append(file.read(5))
file.seek(file.tell() + 36)
file.close()
def info():
print('Nombre del Sistema: ' + nombre)
print('Version: ' + version)
print('Etiqueta del Volumen: ' + etiqueta)
print('Tamano del cluster en bytes: ' + cluster)
print('Numero de clusters que mide el directorio: ' + numero)
print('Numero de cluster que mide la unidad completa: ' + numeroCompleto)
def listar():
file = open('fiunamfs.img', 'r')
file.seek(2048)
for i in range(64):
name = file.read(15)
if name != 'Xx.xXx.xXx.xXx.':
print(name)
file.seek(file.tell() + 49)
file.close()
def borrar(archivo):
borrado = False
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == archivo:
file.seek(file.tell() - 15)
file.write('Xx.xXx.xXx.xXx.')
borrado = True
file.seek(file.tell() + 49)
file.close()
return borrado
def tamaArchivo(path):
si = stat(path).st_size
return si
def dePcASistema(path, nombre):
posicion = 0
actual = 0
try:
new = open(path, 'r+')
file = open('fiunamfs.img', 'r+')
file.seek(2048)
bandera = False
tam = stat(path).st_size
while bandera == False:
name = file.read(15)
if name == 'Xx.xXx.xXx.xXx.':
file.seek(file.tell() - 15)
file.write(nombre)
actual = file.tell()
print('El archivo fue copiado')
bandera = True
file.seek(file.tell() + 49)
file.close()
file = open('fiunamfs.img', 'r+')
pa = clusterVacio()
inde = 2048 * pa
tamano = tamaArchivo(path)
file.seek(inde)
file.write(new.read(tamano))
file.close()
file = open('fiunamfs.img', 'r+')
file.seek(actual)
file.write(str(pa))
file.close()
except:
print('Este archivo no existe')
def deSistemaAPc(archivo, nombre):
tam = 0
clu = 0
file = open('fiunamfs.img', 'r')
file.seek(2048)
new = open(archivo, 'r+')
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == nombre:
tam = file.read(8)
clu = file.read(5)
file.close()
    aux2 = 2048 * int(clu)
    file = open('fiunamfs.img', 'r')
    file.seek(aux2)
    new.write(file.read(int(tam)))
def nombreArchivo(path):
tam = len(path)
slash = 0
name = ''
name2 = ''
for i in range(tam):
if path[i] == '/':
slash = i
for i in range(slash + 1, tam):
name = name + path[i]
espaces = 15 - len(name)
for i in range(espaces):
name2 = name2 + ' '
return name2 + name
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
file.seek(10)
<|reserved_special_token_0|>
file.seek(20)
<|reserved_special_token_0|>
file.seek(40)
<|reserved_special_token_0|>
file.seek(47)
<|reserved_special_token_0|>
file.seek(52)
<|reserved_special_token_0|>
file.close()
<|reserved_special_token_0|>
def clusterVacio():
arreAux = []
busca = 1
bandera = True
for i in range(len(clusters)):
clu = clusters[i]
arreAux.append(int(clu[0]))
print(arreAux)
while bandera:
if busca in arreAux:
busca = busca + 1
else:
bandera = False
return busca
def tablaArchivos():
global archivos
global tams
global clusters
archivos = []
tams = []
clusters = []
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
archivos.append(file.read(15))
tams.append(file.read(8))
clusters.append(file.read(5))
file.seek(file.tell() + 36)
file.close()
def info():
print('Nombre del Sistema: ' + nombre)
print('Version: ' + version)
print('Etiqueta del Volumen: ' + etiqueta)
print('Tamano del cluster en bytes: ' + cluster)
print('Numero de clusters que mide el directorio: ' + numero)
print('Numero de cluster que mide la unidad completa: ' + numeroCompleto)
def listar():
file = open('fiunamfs.img', 'r')
file.seek(2048)
for i in range(64):
name = file.read(15)
if name != 'Xx.xXx.xXx.xXx.':
print(name)
file.seek(file.tell() + 49)
file.close()
def borrar(archivo):
borrado = False
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == archivo:
file.seek(file.tell() - 15)
file.write('Xx.xXx.xXx.xXx.')
borrado = True
file.seek(file.tell() + 49)
file.close()
return borrado
def tamaArchivo(path):
si = stat(path).st_size
return si
def dePcASistema(path, nombre):
posicion = 0
actual = 0
try:
new = open(path, 'r+')
file = open('fiunamfs.img', 'r+')
file.seek(2048)
bandera = False
tam = stat(path).st_size
while bandera == False:
name = file.read(15)
if name == 'Xx.xXx.xXx.xXx.':
file.seek(file.tell() - 15)
file.write(nombre)
actual = file.tell()
print('El archivo fue copiado')
bandera = True
file.seek(file.tell() + 49)
file.close()
file = open('fiunamfs.img', 'r+')
pa = clusterVacio()
inde = 2048 * pa
tamano = tamaArchivo(path)
file.seek(inde)
file.write(new.read(tamano))
file.close()
file = open('fiunamfs.img', 'r+')
file.seek(actual)
file.write(str(pa))
file.close()
except:
print('Este archivo no existe')
def deSistemaAPc(archivo, nombre):
tam = 0
clu = 0
file = open('fiunamfs.img', 'r')
file.seek(2048)
new = open(archivo, 'r+')
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == nombre:
tam = file.read(8)
clu = file.read(5)
file.close()
aux2 = 2048 * clu
file = open('fiunamfs.img', 'r')
file.seek(aux2)
new.write(file.read(tam))
def nombreArchivo(path):
tam = len(path)
slash = 0
name = ''
name2 = ''
for i in range(tam):
if path[i] == '/':
slash = i
for i in range(slash + 1, tam):
name = name + path[i]
espaces = 15 - len(name)
for i in range(espaces):
name2 = name2 + ' '
return name2 + name
if nombre == 'FiUnamFS' and version == '0.7':
correcto = True
while correcto:
tablaArchivos()
print('Sistema de Archivos FI Unam FS')
print('1: Listar')
print('2: Copiar archivo')
print('3: Copiar archivo a la computadora')
print('4: Eliminar archivo')
print('5: Desgramentar')
print('6: Mostar informacion del sistema de archivos')
print('7: Salir')
opcion = input('Opcion: ')
if opcion == 6:
info()
elif opcion == 1:
listar()
elif opcion == 4:
archivo = raw_input('Nombre del archivo a borrar: ')
if borrar(archivo):
print('El archivo fue borrado')
else:
print('No se encontro el archivo')
elif opcion == 3:
archivo = raw_input('Nombre del archivo a copiar: ')
nombre = nombreArchivo(archivo)
deSistemaAPc(archivo, nombre)
elif opcion == 2:
archivo = raw_input('Nombre del archivo a copiar: ')
nombre = nombreArchivo(archivo)
dePcASistema(archivo, nombre)
elif opcion == 9:
print(archivos)
print(clusters)
print(tams)
elif opcion == 8:
va = clusterVacio()
print(va)
elif opcion == 7:
print('Sistema desmontado')
correcto = False
elif opcion == 5:
print('No se implemento')
else:
print(
'No se puede abrir el sistema de archivos debido a que no es el archivo correcto o la version correcta. Revise nuevamente que tenga la imagen correcta.'
)
exit()
<|reserved_special_token_1|>
from sys import exit
from os import stat
file = open('fiunamfs.img', 'r')
nombre = file.read(8)
file.seek(10)
version = file.read(3)
file.seek(20)
etiqueta = file.read(15)
file.seek(40)
cluster = file.read(5)
file.seek(47)
numero = file.read(2)
file.seek(52)
numeroCompleto = file.read(8)
file.close()
archivos = []
tams = []
clusters = []
def clusterVacio():
arreAux = []
busca = 1
bandera = True
for i in range(len(clusters)):
clu = clusters[i]
arreAux.append(int(clu[0]))
print(arreAux)
while bandera:
if busca in arreAux:
busca = busca + 1
else:
bandera = False
return busca
def tablaArchivos():
global archivos
global tams
global clusters
archivos = []
tams = []
clusters = []
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
archivos.append(file.read(15))
tams.append(file.read(8))
clusters.append(file.read(5))
file.seek(file.tell() + 36)
file.close()
def info():
print('Nombre del Sistema: ' + nombre)
print('Version: ' + version)
print('Etiqueta del Volumen: ' + etiqueta)
print('Tamano del cluster en bytes: ' + cluster)
print('Numero de clusters que mide el directorio: ' + numero)
print('Numero de cluster que mide la unidad completa: ' + numeroCompleto)
def listar():
file = open('fiunamfs.img', 'r')
file.seek(2048)
for i in range(64):
name = file.read(15)
if name != 'Xx.xXx.xXx.xXx.':
print(name)
file.seek(file.tell() + 49)
file.close()
def borrar(archivo):
borrado = False
file = open('fiunamfs.img', 'r+')
file.seek(2048)
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == archivo:
file.seek(file.tell() - 15)
file.write('Xx.xXx.xXx.xXx.')
borrado = True
file.seek(file.tell() + 49)
file.close()
return borrado
def tamaArchivo(path):
si = stat(path).st_size
return si
def dePcASistema(path, nombre):
posicion = 0
actual = 0
try:
new = open(path, 'r+')
file = open('fiunamfs.img', 'r+')
file.seek(2048)
bandera = False
tam = stat(path).st_size
while bandera == False:
name = file.read(15)
if name == 'Xx.xXx.xXx.xXx.':
file.seek(file.tell() - 15)
file.write(nombre)
actual = file.tell()
print('El archivo fue copiado')
bandera = True
file.seek(file.tell() + 49)
file.close()
file = open('fiunamfs.img', 'r+')
pa = clusterVacio()
inde = 2048 * pa
tamano = tamaArchivo(path)
file.seek(inde)
file.write(new.read(tamano))
file.close()
file = open('fiunamfs.img', 'r+')
file.seek(actual)
file.write(str(pa))
file.close()
except:
print('Este archivo no existe')
def deSistemaAPc(archivo, nombre):
tam = 0
clu = 0
file = open('fiunamfs.img', 'r')
file.seek(2048)
new = open(archivo, 'r+')
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == nombre:
tam = file.read(8)
clu = file.read(5)
file.close()
aux2 = 2048 * clu
file = open('fiunamfs.img', 'r')
file.seek(aux2)
new.write(file.read(tam))
def nombreArchivo(path):
tam = len(path)
slash = 0
name = ''
name2 = ''
for i in range(tam):
if path[i] == '/':
slash = i
for i in range(slash + 1, tam):
name = name + path[i]
espaces = 15 - len(name)
for i in range(espaces):
name2 = name2 + ' '
return name2 + name
if nombre == 'FiUnamFS' and version == '0.7':
correcto = True
while correcto:
tablaArchivos()
print('Sistema de Archivos FI Unam FS')
print('1: Listar')
print('2: Copiar archivo')
print('3: Copiar archivo a la computadora')
print('4: Eliminar archivo')
print('5: Desgramentar')
print('6: Mostar informacion del sistema de archivos')
print('7: Salir')
opcion = input('Opcion: ')
if opcion == 6:
info()
elif opcion == 1:
listar()
elif opcion == 4:
archivo = raw_input('Nombre del archivo a borrar: ')
if borrar(archivo):
print('El archivo fue borrado')
else:
print('No se encontro el archivo')
elif opcion == 3:
archivo = raw_input('Nombre del archivo a copiar: ')
nombre = nombreArchivo(archivo)
deSistemaAPc(archivo, nombre)
elif opcion == 2:
archivo = raw_input('Nombre del archivo a copiar: ')
nombre = nombreArchivo(archivo)
dePcASistema(archivo, nombre)
elif opcion == 9:
print(archivos)
print(clusters)
print(tams)
elif opcion == 8:
va = clusterVacio()
print(va)
elif opcion == 7:
print('Sistema desmontado')
correcto = False
elif opcion == 5:
print('No se implemento')
else:
print(
'No se puede abrir el sistema de archivos debido a que no es el archivo correcto o la version correcta. Revise nuevamente que tenga la imagen correcta.'
)
exit()
<|reserved_special_token_1|>
from sys import exit
from os import stat
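# Superblock layout assumed by the reads below: bytes 0-7 hold the FS name,
# 10-12 the version, 20-34 the volume label, 40-44 the cluster size in bytes,
# 47-48 the directory size in clusters and 52-59 the total number of clusters.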
file = open("fiunamfs.img","r")
nombre = file.read(8)
file.seek(10)
version = file.read(3)
file.seek(20)
etiqueta = file.read(15)
file.seek(40)
cluster = file.read(5)
file.seek(47)
numero = file.read(2)
file.seek(52)
numeroCompleto = file.read(8)
file.close()
archivos = []
tams = []
clusters = []
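# Returns the lowest cluster number not yet referenced in the directory table.
# Note: it only inspects the first character of each 5-byte cluster field, so it
# assumes single-digit cluster numbers.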
def clusterVacio():
arreAux = []
busca = 1
bandera = True
for i in range(len(clusters)):
clu=clusters[i]
arreAux.append(int(clu[0]))
print(arreAux)
while bandera:
if busca in arreAux:
busca = busca + 1
else:
bandera = False
return busca
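# Loads the 64 directory entries into the global lists: 15-byte name, 8-byte size,
# 5-byte first cluster, then 36 unused bytes per 64-byte entry.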
def tablaArchivos():
global archivos
global tams
global clusters
archivos = []
tams = []
clusters = []
file = open("fiunamfs.img","r+")
file.seek(2048)
for i in range(64):
archivos.append(file.read(15))
tams.append(file.read(8))
clusters.append(file.read(5))
file.seek(file.tell()+36)
file.close()
def info():
print("Nombre del Sistema: " + nombre)
print("Version: " + version)
print("Etiqueta del Volumen: " + etiqueta)
print("Tamano del cluster en bytes: " + cluster)
print("Numero de clusters que mide el directorio: " + numero)
print("Numero de cluster que mide la unidad completa: " + numeroCompleto)
def listar():
file = open("fiunamfs.img","r")
file.seek(2048)
for i in range(64):
name = file.read(15)
if name != 'Xx.xXx.xXx.xXx.':
print(name)
file.seek(file.tell()+49)
file.close()
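# "Deletes" a file by overwriting its directory-entry name with the free-entry
# sentinel 'Xx.xXx.xXx.xXx.'; the data clusters themselves are left untouched.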
def borrar(archivo):
borrado = False
file = open("fiunamfs.img","r+")
file.seek(2048)
for i in range(64):
name = file.read(15)
aux = name.strip()
if aux == archivo:
file.seek(file.tell()-15)
file.write('Xx.xXx.xXx.xXx.')
borrado = True
file.seek(file.tell()+49)
file.close()
return borrado
def tamaArchivo(path):
si = stat(path).st_size
return si
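# Copies a file from the PC into the image: claims the first free directory entry,
# writes the (padded) name, writes the file contents at the first free cluster and
# records the entry's size and first-cluster fields.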
def dePcASistema(path, nombre):
posicion =0
actual =0
try:
new = open(path,"r+")
file = open("fiunamfs.img","r+")
file.seek(2048)
bandera = False
tam = stat(path).st_size
while(bandera == False):
name = file.read(15)
if (name == 'Xx.xXx.xXx.xXx.'):
file.seek(file.tell()-15)
file.write(nombre)
actual = file.tell()
print("El archivo fue copiado")
bandera = True
file.seek(file.tell()+49)
file.close()
file = open("fiunamfs.img","r+")
pa = clusterVacio()
inde = 2048*pa
tamano = tamaArchivo(path)
file.seek(inde)
file.write(new.read(tamano))
file.close()
file = open("fiunamfs.img","r+")
file.seek(actual)
file.write(str(pa))
file.close()
except:
print("Este archivo no existe")
def deSistemaAPc(archivo,nombre):
    tam = 0
    clu = 0
    file = open("fiunamfs.img","r") #Open the image read-only
    file.seek(2048) #Skip the superblock
    new = open(archivo,"w") #Create/overwrite the destination file on the PC
    for i in range(64):
        inicio = file.tell()
        name = file.read(15)
        aux = name.strip()
        if (aux == nombre.strip()):
            tam = file.read(8)
            clu = file.read(5)
        file.seek(inicio + 64) #Advance to the next 64-byte directory entry
    file.close()
    aux2 = 2048*int(clu) #Byte offset of the file's first cluster
    file = open("fiunamfs.img","r")
    file.seek(aux2)
    new.write(file.read(int(tam)))
    new.close()
    file.close()
def nombreArchivo(path):
tam = len(path)
slash = 0
name = ''
name2 = ''
for i in range(tam):
if (path[i] == '/'):
slash = i
for i in range(slash+1,tam):
name = name + path[i]
    ##TODO: limit file names to 15 characters
espaces = 15 - len(name)
for i in range (espaces):
name2 = name2 + " "
return name2 + name
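# e.g. nombreArchivo('/home/user/a.txt') -> '          a.txt' (left-padded to 15 chars)

# "Mount" check: only operate on the image if the superblock identifies FiUnamFS
# version 0.7. Note: the menu below relies on Python 2 semantics (raw_input, and
# input() evaluating the option to an integer).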
if (nombre == "FiUnamFS" and version == "0.7"):
correcto = True
while(correcto):
tablaArchivos()
print("Sistema de Archivos FI Unam FS")
print("1: Listar")
print("2: Copiar archivo")
print("3: Copiar archivo a la computadora")
print("4: Eliminar archivo")
print("5: Desgramentar")
print("6: Mostar informacion del sistema de archivos")
print("7: Salir")
opcion = input("Opcion: ")
if opcion == 6:
info()
elif opcion == 1:
listar()
elif opcion == 4:
archivo = raw_input("Nombre del archivo a borrar: ")
if(borrar(archivo)):
print('El archivo fue borrado')
else:
print('No se encontro el archivo')
elif opcion == 3:
archivo = raw_input("Nombre del archivo a copiar: ")
nombre = nombreArchivo(archivo)
deSistemaAPc(archivo, nombre)
elif opcion == 2:
archivo = raw_input("Nombre del archivo a copiar: ")
nombre = nombreArchivo(archivo)
dePcASistema(archivo, nombre)
elif opcion == 9:
print(archivos)
print(clusters)
print(tams)
elif opcion == 8:
va = clusterVacio()
print (va)
elif opcion == 7:
print("Sistema desmontado")
correcto = False
elif opcion == 5:
print("No se implemento")
else:
print("No se puede abrir el sistema de archivos debido a que no es el archivo correcto o la version correcta. Revise nuevamente que tenga la imagen correcta.")
exit()
frase = "todos somos promgramadores"
palabras = frase.split()
for p in palabras:
    print(p) #p is already the word; indexing the list with a string would raise a TypeError
#if p[-2] == "o":
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 22 19:29:50 2017
@author: marcos
"""
from sklearn.cluster import KMeans
from sklearn.utils import shuffle
from classes.imagem import Imagem
import numpy as np
def mudaCor(img, metodo='average', nTons=256):
nova = Imagem((img.altura, img.largura))
for x in range(img.largura):
for y in range(img.altura):
r,g,b = img[y][x]
if metodo == 'average':
avg = (r + g + b) / 3.0
nova[y][x] = (avg, avg, avg)
elif metodo == 'r':
nova[y][x] = (r,r,r)
elif metodo == 'inv':
nova[y][x] = (255-r, 255-g, 255-b)
else:
nova[y][x] = (r,g,b)
return nova
def balanco(img, ar, ag, ab):
nova = Imagem((img.altura, img.largura))
for y in range(img.altura):
for x in range(img.largura):
r,g,b = img[y][x]
            R = int(ar*r)
            G = int(ag*g) # use the green balance factor (the original reused ar for every channel)
            B = int(ab*b) # use the blue balance factor
nova[y][x] = (R,G,B)
return nova
def binaria(img):
nova = img.copia()
dados = img.arrLin()
paleta = [[0,0,0], [255,255,255]]
nClusters = 2
amostraAleatoria = shuffle(dados, random_state=0)[:1000]
km = KMeans(nClusters).fit(amostraAleatoria)
labels = km.predict(dados)
for x,label in enumerate(labels):
i = x // img.largura
j = x % img.largura
r,g,b = paleta[label]
nova[i][j] = (r,g,b)
return nova
def propaga(tup, fator):
r,g,b = tup
return (r + fator, g + fator, b + fator)
# Floyd-Steinberg Dithering
def floyd(img):
    nova = mudaCor(img, 'average') # TODO: switch to luminosity once implemented
for y in range(img.altura):
for x in range(img.largura):
r,g,b = nova[y][x]
if r >= 255//2:
nova[y][x] = (255, 255, 255)
else:
nova[y][x] = (0, 0, 0)
quantErro = r - nova[y][x][0]
if x+1 < img.largura:
nova[y][x+1] = propaga(nova[y][x+1], quantErro * 7/16)
if y+1 < img.altura:
if x-1 >= 0:
nova[y+1][x-1] = propaga(nova[y+1][x-1], quantErro * 3/16)
nova[y+1][x] = propaga(nova[y+1][x], quantErro * 5/16)
if x+1 < img.largura:
nova[y+1][x+1] = propaga(nova[y+1][x+1], quantErro * 1/16)
return nova
# Ordered dithering with a Bayer matrix
def bayer(img):
matriz = np.array([[0,60], [45, 110]])
dim = matriz.shape[0]
nova = Imagem((img.altura, img.largura))
for y in range(img.altura):
for x in range(img.largura):
r,g,b = img[y][x]
            Y = (r + g + b) / 3.0 # TODO: switch to luminance (luminosity) once implemented
if Y > matriz[y % dim][x % dim]:
nova[y][x] = (255, 255, 255)
else:
nova[y][x] = (0, 0, 0)
return nova
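# Minimal usage sketch (assumption: Imagem from classes.imagem exposes exactly the API
# used above -- an Imagem((altura, largura)) constructor, .altura/.largura attributes and
# per-pixel indexing with (r, g, b) tuples). Builds a small gray gradient and halftones it.
if __name__ == '__main__':
    img = Imagem((4, 4))
    for y in range(img.altura):
        for x in range(img.largura):
            v = 32 * (x + y)
            img[y][x] = (v, v, v)
    print(floyd(img)[0][0], bayer(img)[3][3])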
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 9 18:52:17 2021
@author: lewis
"""
import csv
import pandas as pd
import re
import statistics
import matplotlib.pyplot as plt
import numpy as np
from bs4 import BeautifulSoup
from urllib.request import urlopen
#Creating a function that groups by, counts, creates a new column from the index, drops the index and changes the column names
def groupby_count(df, groupby_column, count_column):
new_df = pd.DataFrame(df.groupby(groupby_column)[count_column].count())
new_df.columns = ['count']
new_df[groupby_column] = new_df.index.get_level_values(0)
new_df.reset_index(drop = True, inplace = True)
return(new_df)
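#For example (hypothetical call): groupby_count(clean_TMDB_movies, 'month', 'title') would
#return a frame with one row per month and two columns: 'count' and 'month'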
url = 'https://en.wikipedia.org/wiki/Film_series'
html = urlopen(url)
soup = BeautifulSoup(html, 'html.parser')
tables = soup.find_all('table')
#Create a function that strips currency formatting with re.sub() and returns the value as a float
def process_num(num):
return float(re.sub(r'[^\w\s.]','',num))
#test function
num1 = float(re.sub(r'[^\w\s.]','','1,156.30'))
#print(num1)
#Create array to hold the data extracted
gross=[]
year=[]
film=[]
for table in tables:
rows = table.find_all('tr')
for row in rows:
cells = row.find_all('td')
if len(cells) > 1:
Franchise = cells[1]
film.append(Franchise.text.strip())
Gross = cells[6]
gross.append(process_num(Gross.text.strip()))
first = cells[7]
year.append(int(first.text))
# put the data in the pandas dataframe
movie_df= pd.DataFrame({'Gross': gross,
'first': year,
'Franchise': film
})
#print(movie_df)
#print(movie_df.dtypes)
#movies_df_count = movie_df.groupby(["Franchise", "first"])["first"].count()
#print(movies_df_count)
#WIKI_df=movie_df.groupby(["first"])["first"].count()
#print(WIKI_df)
#WIKI_df.plot(kind='bar',x='first',y='count')
#plt.title("Most Movies Release count by Year(Top 68 on WIKI)",fontsize=20)
#TMDB Kaggle Data
movies_TMDB_kaggle= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/tmdb_5000_movies.csv', encoding= 'ISO-8859-1')
#print(len(movies_TMDB_kaggle)) #result 4803 and 20 columns
#print(movies_TMDB_kaggle.isnull().sum()) #tagline and homepage has the most NaN, unnecessary columns
#Clean the dataframe, removed any unnecessary columns
clean_TMDB_movies= movies_TMDB_kaggle.drop(columns=['homepage', 'id', 'overview', 'status', 'tagline', 'original_title'])
#print(clean_TMDB_movies) #result 4803 rows and 14 columns
#print(clean_TMDB_movies.isnull().sum()) # NaNs in the release_date and runtime column
clean_TMDB_movies.dropna(inplace= True)
#print(clean_TMDB_movies.isnull().sum())
#Removing any movie that has a budget of 0
clean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['budget'] != 0]
#Removing any movie with a revenue of 0
clean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['revenue'] != 0]
#To review the profit for each movie, a profit column was created
clean_TMDB_movies['profit'] = clean_TMDB_movies['revenue'] - clean_TMDB_movies['budget']
#Creating a percent profit column in order to compare profits.
clean_TMDB_movies['percent_profit'] = clean_TMDB_movies['profit']/clean_TMDB_movies['budget']*100
#print the top five
#print(clean_TMDB_movies.head())
#checking the data types
#print(clean_TMDB_movies.dtypes)
#convert release_date to datetime and pull out month and day columns
clean_TMDB_movies['release_date'] = pd.to_datetime(clean_TMDB_movies['release_date'])
clean_TMDB_movies['month'], clean_TMDB_movies['day'] = clean_TMDB_movies['release_date'].dt.month, clean_TMDB_movies['release_date'].dt.day
#After the new columns were added, build the list of month categories (1-12)
cat = list(range(1,13))
#Changing the month data type from int to ordered category
clean_TMDB_movies['month'] = pd.Categorical(clean_TMDB_movies['month'], ordered = True, categories = cat)
#confirmation
#print(clean_TMDB_movies.month.dtype)
#print(len(clean_TMDB_movies))
#print(clean_TMDB_movies.describe())
#print(clean_TMDB_movies.revenue.describe())
#print(clean_TMDB_movies.profit.describe())
#print(clean_TMDB_movies.vote_count.describe())
#print(clean_TMDB_movies.percent_profit.describe())
#discretize the budget column
categories = ["very_low", "low", "high", "very_high"]
#saving the clean_TMDB df as a discretized df
movies_discretized = clean_TMDB_movies
#creating a budget cutoff using pandas cut function
movies_discretized["budget"] = pd.cut(movies_discretized["budget"], [0, 13000000, 30000000, 62192550, 400000000], labels = categories)
#repeat the step for revenue
#print(movies_discretized.revenue.describe())
movies_discretized["revenue"] = pd.cut(movies_discretized["revenue"], [0, 21458200, 62954020, 187976900, 2887965000], labels = categories)
#profit
categories_profit = ["negative", "low", "high", "very_high"]
movies_discretized["profit"] = pd.cut(movies_discretized["profit"], [-165710100 , 0, 29314900, 140784100, 2560965000], labels = categories_profit)
#print(movies_discretized["profit"].head())
#Vote_average-very_low: vote averages less than 6, low are between 6 to 6.5, high between 6.5 and 7 and very_high 7 and 8.5
movies_discretized["vote_average"] = pd.cut(movies_discretized["vote_average"], [0, 6, 6.5, 7, 8.5], labels = categories)
#print(movies_discretized["vote_average"].head())
#Vote_count
movies_discretized["vote_count"] = pd.cut(movies_discretized["vote_count"], [0, 440, 1151, 2522, 14000], labels = categories)
#print(movies_discretized["vote_count"].head())
#percent_profit
movies_discretized["percent_profit"] = pd.cut(movies_discretized["percent_profit"], [-100, 0, 108, 436, 6528], labels = categories_profit)
movies_discretized["percent_profit"]
#Categorizing days into weeks
#print(movies_discretized.day.describe())
categories_weeks = ["week_1", "week_2", "week_3", "week_4"]
movies_discretized["week"] = pd.cut(movies_discretized["day"], [0, 8, 15, 22, 32], labels = categories_weeks)
#print(movies_discretized["week"].head())
#day and release_date are no longer needed columns
movies_discretized.drop(columns=['day', 'release_date'], inplace = True)
#print(movies_discretized.head())
#Do major production companies have an impact on the profit margin?
production_company = []
for movie in movies_discretized['production_companies']:
if "Universal" in movie:
production_company.append("Universal")
elif "Sony" in movie:
production_company.append("Sony")
elif "Fox" in movie:
production_company.append("Fox")
elif "DreamWorks" in movie:
production_company.append("DW")
elif "MGM" in movie:
production_company.append("MGM")
elif "Paramount" in movie:
production_company.append("Paramount")
elif "Disney" in movie:
production_company.append("Disney")
elif "Warner Bros" in movie:
production_company.append("WB")
else:
production_company.append("None")
movies_discretized["main_production"] = production_company
#print(movies_discretized["main_production"].head())
movies_discretized_count = movies_discretized.groupby(["main_production", "percent_profit"])["main_production"].count()
movies_discretized_count_df= pd.DataFrame(movies_discretized_count)
#print(movies_discretized_count_df)
#change the last column to count instead of main production
movies_discretized_count_df.columns = ["counts"]
#print(movies_discretized_count_df.head())
#total count for the number of percent_profit counts for each main production.
movies_discretized_count_df["production_company"]=movies_discretized_count_df.index.get_level_values(0)
movies_discretized_count_df["percent_profit_category"] = movies_discretized_count_df.index.get_level_values(1)
#print(movies_discretized_count_df)
#drop the indexes to create another column with the sum of the counts of each production
movies_discretized_count_df = movies_discretized_count_df.reset_index(drop = True)
#The sum of each production company category.
production_company_discretized_count_df = movies_discretized_count_df.groupby(["production_company"])["counts"].sum()
#print(production_company_discretized_count_df)
#To get a column with the overall count for each production company, duplicate the
#production_company column and replace each company label with its total count
movies_discretized_count_df["production_company_count"] = movies_discretized_count_df["production_company"]
company_totals = {"DW": 82, "Disney": 116, "Fox": 298, "MGM": 87, "None": 1782,
                  "Paramount": 235, "Sony": 42, "Universal": 282, "WB": 269}
movies_discretized_count_df["production_company_count"] = movies_discretized_count_df["production_company_count"].replace(company_totals)
#print(movies_discretized_count_df)
#percentage
movies_discretized_count_df["percent"] = movies_discretized_count_df["counts"]/movies_discretized_count_df["production_company_count"] *100
#print(movies_discretized_count_df.head())
#dropping production_company_count and count column no longer needed
movies_discretized_count_df.drop(["counts", "production_company_count"], axis = 1, inplace = True )
#graphing question 1 using Matplot lib
#graph = movies_discretized_count_df.pivot("production_company", "percent_profit_category","percent").plot(kind="bar", color= ['blue', 'green', 'purple', 'red'], title='Profit Margin amongst Production Companies')
#change the x and y axis for graph
#plt.ylabel("Percent Profit")
#plt.xlabel("Production")
#plt.xticks(rotation = 0)
#position the legends underneath the graph; Now the graph looks beautiful
#plt.legend( loc = "lower center", bbox_to_anchor = (.5, -.4), ncol = 4, title = "Percent Profit Category")
#plt.show()
#Question 2: Does the week of the month in which a film is released have an impact on its profit margin?
movies_discretized_count_week = movies_discretized.groupby(["week", "percent_profit"])["week"].count()
movies_discretized_count_df_week = pd.DataFrame(movies_discretized_count_week)
#Checking the dataframe
#print(movies_discretized_count_df_week)
#changing column that is labeled week to count
movies_discretized_count_df_week.columns = ["counts"]
#total count for the number of % profit for each week
movies_discretized_count_df_week["week"]=movies_discretized_count_df_week.index.get_level_values(0)
movies_discretized_count_df_week["percent_profit_category"] = movies_discretized_count_df_week.index.get_level_values(1)
#print(movies_discretized_count_df_week)
movies_discretized_count_df_week = movies_discretized_count_df_week.reset_index(drop = True) #drop the index
#total count of films released in each week-of-month category
sum_discretized_count_df_week = movies_discretized_count_df_week.groupby(["week"])["counts"].sum()
#print(sum_discretized_count_df_week) #the sums are centered around 700-800s
movies_discretized_count_df_week["week_count"] = movies_discretized_count_df_week["week"]
#Now replacing each week label with the total count for that week
movies_discretized_count_df_week["week_count"] = movies_discretized_count_df_week["week_count"].replace(["week_1"], 783)
movies_discretized_count_df_week["week_count"] = movies_discretized_count_df_week["week_count"].replace(["week_2"], 817)
movies_discretized_count_df_week["week_count"] = movies_discretized_count_df_week["week_count"].replace(["week_3"], 782)
movies_discretized_count_df_week["week_count"] = movies_discretized_count_df_week["week_count"].replace(["week_4"], 811)
#print(movies_discretized_count_df_week.head())
#the division below raised 'Object with dtype category cannot perform the numpy op true_divide', so cast week_count to int first
movies_discretized_count_df_week["week_count"]= movies_discretized_count_df_week["week_count"].astype(np.int64)
#convert into percentage; counts/week_count * 100
movies_discretized_count_df_week["percent"] = movies_discretized_count_df_week["counts"]/movies_discretized_count_df_week["week_count"] *100
#print(movies_discretized_count_df_week.head())
#dropping the counts and week_count columns; with the percent column in place they are no longer needed
movies_discretized_count_df_week.drop(["counts", "week_count"], axis = 1, inplace = True )
#Time to create a visual
#graph_question_2 = movies_discretized_count_df_week.pivot("week", "percent_profit_category", "percent").plot(kind="bar", color = ["blue", "green", "purple", "red"], title = "Impact of Percent Profit by Week")
#plt.ylabel("Percent")
#plt.xlabel("Week")
#plt.xticks(rotation = 0)
#plt.legend( loc = "lower center", bbox_to_anchor = (.5, -.4), ncol = 4, title = "Percent Profit")
#plt.show()
#IMDb Kaggle Data
movies_IMDb= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/IMDb_movies.csv')
clean_IMDb= movies_IMDb.drop(columns=['imdb_title_id','original_title','description', 'reviews_from_users', 'reviews_from_critics'])
#print(clean_IMDb) #85,855 rows and 17 columns
#print(clean_IMDb.isnull().sum())
clean_IMDb.dropna(inplace = True) #drop all the NaNs
#print(clean_IMDb.isnull().sum()) #no more NaNs
#print(len(clean_IMDb)) #6635
#print(clean_IMDb.dtypes)
# QUESTION 3: How does budget impact vote average?
#plt.plot(clean_IMDb.budget, clean_IMDb.avg_vote, 'o')
#plt.title('How does Budget Impact Vote Average?')
#plt.xlabel('Budget')
#plt.ylabel('Vote Average')
#plt.show()
#print(clean_IMDb['budget'].head())
#print the top five
#print(clean_IMDb.head())
#Using the groupby_count function that takes the following arguments (df, groupby_column, count_column)
IMDb_movies_genre = groupby_count(clean_IMDb, 'genre', 'genre')
#Sorting the df, so the bar graph will be in descending order
IMDb_movies_genre.sort_values(['count'], ascending=[False], inplace = True)
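#a possible bar chart for the sorted genre counts, commented out like the other plots; it assumes the 'genre' and
#'count' columns produced by groupby_count, and the title string is only a placeholder:
#IMDb_movies_genre.head(10).plot(kind='bar', x='genre', y='count', title='Most Common IMDb Genres')
#plt.xticks(rotation=45)
#plt.show()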
#Statista movie theatre revenue and post-COVID forecasts to 2025, loaded into a pandas dataframe
revenue_covid= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/revenue_covid_impact.csv')
print(revenue_covid)
AMC_revenue= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/AMC.csv')
#print(AMC_revenue)
#print(AMC_revenue.info())
print(AMC_revenue.head())
#During 2020, AMC Theatres reported annual revenues of 1.24 billion U.S. dollars, a dramatic decrease from previous years as a consequence of the COVID-19 pandemic.
plt.plot(AMC_revenue.Year, AMC_revenue.Money, 'o')
plt.title('AMC revenue over 15 years')
plt.xlabel('Year')
plt.ylabel('Revenue')
plt.show()
#Global box office revenue coronavirus impact 2020-2025
#revenue_covid.plot(x="Year", y=["Originalforecast", "Marchrevision", "Julyrevision"], kind="bar")
#plt.show()
|
normal
|
{
"blob_id": "30b07e57737ac29643769c4773591199b2ba8656",
"index": 2184,
"step-1": "<mask token>\n\n\ndef groupby_count(df, groupby_column, count_column):\n new_df = pd.DataFrame(df.groupby(groupby_column)[count_column].count())\n new_df.columns = ['count']\n new_df[groupby_column] = new_df.index.get_level_values(0)\n new_df.reset_index(drop=True, inplace=True)\n return new_df\n\n\n<mask token>\n\n\ndef process_num(num):\n return float(re.sub('[^\\\\w\\\\s.]', '', num))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef groupby_count(df, groupby_column, count_column):\n new_df = pd.DataFrame(df.groupby(groupby_column)[count_column].count())\n new_df.columns = ['count']\n new_df[groupby_column] = new_df.index.get_level_values(0)\n new_df.reset_index(drop=True, inplace=True)\n return new_df\n\n\n<mask token>\n\n\ndef process_num(num):\n return float(re.sub('[^\\\\w\\\\s.]', '', num))\n\n\n<mask token>\nfor table in tables:\n rows = table.find_all('tr')\n for row in rows:\n cells = row.find_all('td')\n if len(cells) > 1:\n Franchise = cells[1]\n film.append(Franchise.text.strip())\n Gross = cells[6]\n gross.append(process_num(Gross.text.strip()))\n first = cells[7]\n year.append(int(first.text))\n<mask token>\nclean_TMDB_movies.dropna(inplace=True)\n<mask token>\nmovies_discretized['percent_profit']\n<mask token>\nmovies_discretized.drop(columns=['day', 'release_date'], inplace=True)\n<mask token>\nfor movie in movies_discretized['production_companies']:\n if 'Universal' in movie:\n production_company.append('Universal')\n elif 'Sony' in movie:\n production_company.append('Sony')\n elif 'Fox' in movie:\n production_company.append('Fox')\n elif 'DreamWorks' in movie:\n production_company.append('DW')\n elif 'MGM' in movie:\n production_company.append('MGM')\n elif 'Paramount' in movie:\n production_company.append('Paramount')\n elif 'Disney' in movie:\n production_company.append('Disney')\n elif 'Warner Bros' in movie:\n production_company.append('WB')\n else:\n production_company.append('None')\n<mask token>\nmovies_discretized_count_df.drop(['counts', 'production_company_count'],\n axis=1, inplace=True)\n<mask token>\nmovies_discretized_count_df_week.drop(['counts', 'week_count'], axis=1,\n inplace=True)\n<mask token>\nclean_IMDb.dropna(inplace=True)\n<mask token>\nIMDb_movies_genre.sort_values(['count'], ascending=[False], inplace=True)\n<mask token>\nprint(revenue_covid)\n<mask token>\nprint(AMC_revenue.head())\nplt.plot(AMC_revenue.Year, AMC_revenue.Money, 'o')\nplt.title('AMC revenue over 15 years')\nplt.xlabel('Year')\nplt.ylabel('Revenue')\nplt.show()\n",
"step-3": "<mask token>\n\n\ndef groupby_count(df, groupby_column, count_column):\n new_df = pd.DataFrame(df.groupby(groupby_column)[count_column].count())\n new_df.columns = ['count']\n new_df[groupby_column] = new_df.index.get_level_values(0)\n new_df.reset_index(drop=True, inplace=True)\n return new_df\n\n\nurl = 'https://en.wikipedia.org/wiki/Film_series'\nhtml = urlopen(url)\nsoup = BeautifulSoup(html, 'html.parser')\ntables = soup.find_all('table')\n\n\ndef process_num(num):\n return float(re.sub('[^\\\\w\\\\s.]', '', num))\n\n\nnum1 = float(re.sub('[^\\\\w\\\\s.]', '', '1,156.30'))\ngross = []\nyear = []\nfilm = []\nfor table in tables:\n rows = table.find_all('tr')\n for row in rows:\n cells = row.find_all('td')\n if len(cells) > 1:\n Franchise = cells[1]\n film.append(Franchise.text.strip())\n Gross = cells[6]\n gross.append(process_num(Gross.text.strip()))\n first = cells[7]\n year.append(int(first.text))\nmovie_df = pd.DataFrame({'Gross': gross, 'first': year, 'Franchise': film})\nmovies_TMDB_kaggle = pd.read_csv(\n 'C:/Users/lewis/OneDrive/Documents/MovieData/tmdb_5000_movies.csv',\n encoding='ISO-8859-1')\nclean_TMDB_movies = movies_TMDB_kaggle.drop(columns=['homepage', 'id',\n 'overview', 'status', 'tagline', 'original_title'])\nclean_TMDB_movies.dropna(inplace=True)\nclean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['budget'] != 0]\nclean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['revenue'] != 0]\nclean_TMDB_movies['profit'] = clean_TMDB_movies['revenue'] - clean_TMDB_movies[\n 'budget']\nclean_TMDB_movies['percent_profit'] = clean_TMDB_movies['profit'\n ] / clean_TMDB_movies['budget'] * 100\nclean_TMDB_movies['release_date'] = pd.to_datetime(clean_TMDB_movies[\n 'release_date'])\nclean_TMDB_movies['month'], clean_TMDB_movies['day'] = clean_TMDB_movies[\n 'release_date'].dt.month, clean_TMDB_movies['release_date'].dt.day\ncat = list(range(1, 13))\nclean_TMDB_movies['month'] = pd.Categorical(clean_TMDB_movies['month'],\n ordered=True, categories=cat)\ncategories = ['very_low', 'low', 'high', 'very_high']\nmovies_discretized = clean_TMDB_movies\nmovies_discretized['budget'] = pd.cut(movies_discretized['budget'], [0, \n 13000000, 30000000, 62192550, 400000000], labels=categories)\nmovies_discretized['revenue'] = pd.cut(movies_discretized['revenue'], [0, \n 21458200, 62954020, 187976900, 2887965000], labels=categories)\ncategories_profit = ['negative', 'low', 'high', 'very_high']\nmovies_discretized['profit'] = pd.cut(movies_discretized['profit'], [-\n 165710100, 0, 29314900, 140784100, 2560965000], labels=categories_profit)\nmovies_discretized['vote_average'] = pd.cut(movies_discretized[\n 'vote_average'], [0, 6, 6.5, 7, 8.5], labels=categories)\nmovies_discretized['vote_count'] = pd.cut(movies_discretized['vote_count'],\n [0, 440, 1151, 2522, 14000], labels=categories)\nmovies_discretized['percent_profit'] = pd.cut(movies_discretized[\n 'percent_profit'], [-100, 0, 108, 436, 6528], labels=categories_profit)\nmovies_discretized['percent_profit']\ncategories_weeks = ['week_1', 'week_2', 'week_3', 'week_4']\nmovies_discretized['week'] = pd.cut(movies_discretized['day'], [0, 8, 15, \n 22, 32], labels=categories_weeks)\nmovies_discretized.drop(columns=['day', 'release_date'], inplace=True)\nproduction_company = []\nfor movie in movies_discretized['production_companies']:\n if 'Universal' in movie:\n production_company.append('Universal')\n elif 'Sony' in movie:\n production_company.append('Sony')\n elif 'Fox' in movie:\n production_company.append('Fox')\n elif 
'DreamWorks' in movie:\n production_company.append('DW')\n elif 'MGM' in movie:\n production_company.append('MGM')\n elif 'Paramount' in movie:\n production_company.append('Paramount')\n elif 'Disney' in movie:\n production_company.append('Disney')\n elif 'Warner Bros' in movie:\n production_company.append('WB')\n else:\n production_company.append('None')\nmovies_discretized['main_production'] = production_company\nmovies_discretized_count = movies_discretized.groupby(['main_production',\n 'percent_profit'])['main_production'].count()\nmovies_discretized_count_df = pd.DataFrame(movies_discretized_count)\nmovies_discretized_count_df.columns = ['counts']\nmovies_discretized_count_df['production_company'\n ] = movies_discretized_count_df.index.get_level_values(0)\nmovies_discretized_count_df['percent_profit_category'\n ] = movies_discretized_count_df.index.get_level_values(1)\nmovies_discretized_count_df = movies_discretized_count_df.reset_index(drop=True\n )\nproduction_company_discretized_count_df = movies_discretized_count_df.groupby([\n 'production_company'])['counts'].sum()\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company']\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'DW'], 82)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Disney'], 116)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Fox'], 298)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'MGM'], 87)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'None'], 1782)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Paramount'], 235)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Sony'], 42)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Universal'], 282)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'WB'], 269)\nmovies_discretized_count_df['percent'] = movies_discretized_count_df['counts'\n ] / movies_discretized_count_df['production_company_count'] * 100\nmovies_discretized_count_df.drop(['counts', 'production_company_count'],\n axis=1, inplace=True)\nmovies_discretized_count_week = movies_discretized.groupby(['week',\n 'percent_profit'])['week'].count()\nmovies_discretized_count_df_week = pd.DataFrame(movies_discretized_count_week)\nmovies_discretized_count_df_week.columns = ['counts']\nmovies_discretized_count_df_week['week'\n ] = movies_discretized_count_df_week.index.get_level_values(0)\nmovies_discretized_count_df_week['percent_profit_category'\n ] = movies_discretized_count_df_week.index.get_level_values(1)\nmovies_discretized_count_df_week = (movies_discretized_count_df_week.\n reset_index(drop=True))\nsum_discretized_count_df_week = movies_discretized_count_df_week.groupby([\n 'week'])['counts'].sum()\nmovies_discretized_count_df_week['week_count'\n ] = 
movies_discretized_count_df_week['week']\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_1'], 783)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_2'], 817)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_3'], 782)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_4'], 811)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].astype(np.int64)\nmovies_discretized_count_df_week['percent'] = movies_discretized_count_df_week[\n 'counts'] / movies_discretized_count_df_week['week_count'] * 100\nmovies_discretized_count_df_week.drop(['counts', 'week_count'], axis=1,\n inplace=True)\nmovies_IMDb = pd.read_csv(\n 'C:/Users/lewis/OneDrive/Documents/MovieData/IMDb_movies.csv')\nclean_IMDb = movies_IMDb.drop(columns=['imdb_title_id', 'original_title',\n 'description', 'reviews_from_users', 'reviews_from_critics'])\nclean_IMDb.dropna(inplace=True)\nIMDb_movies_genre = groupby_count(clean_IMDb, 'genre', 'genre')\nIMDb_movies_genre.sort_values(['count'], ascending=[False], inplace=True)\nrevenue_covid = pd.read_csv(\n 'C:/Users/lewis/OneDrive/Documents/MovieData/revenue_covid_impact.csv')\nprint(revenue_covid)\nAMC_revenue = pd.read_csv('C:/Users/lewis/OneDrive/Documents/MovieData/AMC.csv'\n )\nprint(AMC_revenue.head())\nplt.plot(AMC_revenue.Year, AMC_revenue.Money, 'o')\nplt.title('AMC revenue over 15 years')\nplt.xlabel('Year')\nplt.ylabel('Revenue')\nplt.show()\n",
"step-4": "<mask token>\nimport csv\nimport pandas as pd\nimport re\nimport statistics\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom bs4 import BeautifulSoup\nfrom urllib.request import urlopen\n\n\ndef groupby_count(df, groupby_column, count_column):\n new_df = pd.DataFrame(df.groupby(groupby_column)[count_column].count())\n new_df.columns = ['count']\n new_df[groupby_column] = new_df.index.get_level_values(0)\n new_df.reset_index(drop=True, inplace=True)\n return new_df\n\n\nurl = 'https://en.wikipedia.org/wiki/Film_series'\nhtml = urlopen(url)\nsoup = BeautifulSoup(html, 'html.parser')\ntables = soup.find_all('table')\n\n\ndef process_num(num):\n return float(re.sub('[^\\\\w\\\\s.]', '', num))\n\n\nnum1 = float(re.sub('[^\\\\w\\\\s.]', '', '1,156.30'))\ngross = []\nyear = []\nfilm = []\nfor table in tables:\n rows = table.find_all('tr')\n for row in rows:\n cells = row.find_all('td')\n if len(cells) > 1:\n Franchise = cells[1]\n film.append(Franchise.text.strip())\n Gross = cells[6]\n gross.append(process_num(Gross.text.strip()))\n first = cells[7]\n year.append(int(first.text))\nmovie_df = pd.DataFrame({'Gross': gross, 'first': year, 'Franchise': film})\nmovies_TMDB_kaggle = pd.read_csv(\n 'C:/Users/lewis/OneDrive/Documents/MovieData/tmdb_5000_movies.csv',\n encoding='ISO-8859-1')\nclean_TMDB_movies = movies_TMDB_kaggle.drop(columns=['homepage', 'id',\n 'overview', 'status', 'tagline', 'original_title'])\nclean_TMDB_movies.dropna(inplace=True)\nclean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['budget'] != 0]\nclean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['revenue'] != 0]\nclean_TMDB_movies['profit'] = clean_TMDB_movies['revenue'] - clean_TMDB_movies[\n 'budget']\nclean_TMDB_movies['percent_profit'] = clean_TMDB_movies['profit'\n ] / clean_TMDB_movies['budget'] * 100\nclean_TMDB_movies['release_date'] = pd.to_datetime(clean_TMDB_movies[\n 'release_date'])\nclean_TMDB_movies['month'], clean_TMDB_movies['day'] = clean_TMDB_movies[\n 'release_date'].dt.month, clean_TMDB_movies['release_date'].dt.day\ncat = list(range(1, 13))\nclean_TMDB_movies['month'] = pd.Categorical(clean_TMDB_movies['month'],\n ordered=True, categories=cat)\ncategories = ['very_low', 'low', 'high', 'very_high']\nmovies_discretized = clean_TMDB_movies\nmovies_discretized['budget'] = pd.cut(movies_discretized['budget'], [0, \n 13000000, 30000000, 62192550, 400000000], labels=categories)\nmovies_discretized['revenue'] = pd.cut(movies_discretized['revenue'], [0, \n 21458200, 62954020, 187976900, 2887965000], labels=categories)\ncategories_profit = ['negative', 'low', 'high', 'very_high']\nmovies_discretized['profit'] = pd.cut(movies_discretized['profit'], [-\n 165710100, 0, 29314900, 140784100, 2560965000], labels=categories_profit)\nmovies_discretized['vote_average'] = pd.cut(movies_discretized[\n 'vote_average'], [0, 6, 6.5, 7, 8.5], labels=categories)\nmovies_discretized['vote_count'] = pd.cut(movies_discretized['vote_count'],\n [0, 440, 1151, 2522, 14000], labels=categories)\nmovies_discretized['percent_profit'] = pd.cut(movies_discretized[\n 'percent_profit'], [-100, 0, 108, 436, 6528], labels=categories_profit)\nmovies_discretized['percent_profit']\ncategories_weeks = ['week_1', 'week_2', 'week_3', 'week_4']\nmovies_discretized['week'] = pd.cut(movies_discretized['day'], [0, 8, 15, \n 22, 32], labels=categories_weeks)\nmovies_discretized.drop(columns=['day', 'release_date'], inplace=True)\nproduction_company = []\nfor movie in movies_discretized['production_companies']:\n if 'Universal' 
in movie:\n production_company.append('Universal')\n elif 'Sony' in movie:\n production_company.append('Sony')\n elif 'Fox' in movie:\n production_company.append('Fox')\n elif 'DreamWorks' in movie:\n production_company.append('DW')\n elif 'MGM' in movie:\n production_company.append('MGM')\n elif 'Paramount' in movie:\n production_company.append('Paramount')\n elif 'Disney' in movie:\n production_company.append('Disney')\n elif 'Warner Bros' in movie:\n production_company.append('WB')\n else:\n production_company.append('None')\nmovies_discretized['main_production'] = production_company\nmovies_discretized_count = movies_discretized.groupby(['main_production',\n 'percent_profit'])['main_production'].count()\nmovies_discretized_count_df = pd.DataFrame(movies_discretized_count)\nmovies_discretized_count_df.columns = ['counts']\nmovies_discretized_count_df['production_company'\n ] = movies_discretized_count_df.index.get_level_values(0)\nmovies_discretized_count_df['percent_profit_category'\n ] = movies_discretized_count_df.index.get_level_values(1)\nmovies_discretized_count_df = movies_discretized_count_df.reset_index(drop=True\n )\nproduction_company_discretized_count_df = movies_discretized_count_df.groupby([\n 'production_company'])['counts'].sum()\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company']\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'DW'], 82)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Disney'], 116)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Fox'], 298)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'MGM'], 87)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'None'], 1782)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Paramount'], 235)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Sony'], 42)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'Universal'], 282)\nmovies_discretized_count_df['production_company_count'\n ] = movies_discretized_count_df['production_company_count'].replace([\n 'WB'], 269)\nmovies_discretized_count_df['percent'] = movies_discretized_count_df['counts'\n ] / movies_discretized_count_df['production_company_count'] * 100\nmovies_discretized_count_df.drop(['counts', 'production_company_count'],\n axis=1, inplace=True)\nmovies_discretized_count_week = movies_discretized.groupby(['week',\n 'percent_profit'])['week'].count()\nmovies_discretized_count_df_week = pd.DataFrame(movies_discretized_count_week)\nmovies_discretized_count_df_week.columns = ['counts']\nmovies_discretized_count_df_week['week'\n ] = movies_discretized_count_df_week.index.get_level_values(0)\nmovies_discretized_count_df_week['percent_profit_category'\n ] = movies_discretized_count_df_week.index.get_level_values(1)\nmovies_discretized_count_df_week = (movies_discretized_count_df_week.\n reset_index(drop=True))\nsum_discretized_count_df_week = 
movies_discretized_count_df_week.groupby([\n 'week'])['counts'].sum()\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week']\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_1'], 783)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_2'], 817)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_3'], 782)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].replace(['week_4'], 811)\nmovies_discretized_count_df_week['week_count'\n ] = movies_discretized_count_df_week['week_count'].astype(np.int64)\nmovies_discretized_count_df_week['percent'] = movies_discretized_count_df_week[\n 'counts'] / movies_discretized_count_df_week['week_count'] * 100\nmovies_discretized_count_df_week.drop(['counts', 'week_count'], axis=1,\n inplace=True)\nmovies_IMDb = pd.read_csv(\n 'C:/Users/lewis/OneDrive/Documents/MovieData/IMDb_movies.csv')\nclean_IMDb = movies_IMDb.drop(columns=['imdb_title_id', 'original_title',\n 'description', 'reviews_from_users', 'reviews_from_critics'])\nclean_IMDb.dropna(inplace=True)\nIMDb_movies_genre = groupby_count(clean_IMDb, 'genre', 'genre')\nIMDb_movies_genre.sort_values(['count'], ascending=[False], inplace=True)\nrevenue_covid = pd.read_csv(\n 'C:/Users/lewis/OneDrive/Documents/MovieData/revenue_covid_impact.csv')\nprint(revenue_covid)\nAMC_revenue = pd.read_csv('C:/Users/lewis/OneDrive/Documents/MovieData/AMC.csv'\n )\nprint(AMC_revenue.head())\nplt.plot(AMC_revenue.Year, AMC_revenue.Money, 'o')\nplt.title('AMC revenue over 15 years')\nplt.xlabel('Year')\nplt.ylabel('Revenue')\nplt.show()\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Sep 9 18:52:17 2021\r\n\r\n@author: lewis\r\n\"\"\"\r\n\r\nimport csv \r\nimport pandas as pd \r\nimport re \r\nimport statistics\r\nimport matplotlib.pyplot as plt\r\nimport numpy as np\r\nfrom bs4 import BeautifulSoup\r\nfrom urllib.request import urlopen\r\n\r\n\r\n#Creating a function that groups by, counts, creates a new column from the index, drops the index and changes the column names\r\ndef groupby_count(df, groupby_column, count_column): \r\n new_df = pd.DataFrame(df.groupby(groupby_column)[count_column].count())\r\n new_df.columns = ['count']\r\n new_df[groupby_column] = new_df.index.get_level_values(0)\r\n new_df.reset_index(drop = True, inplace = True)\r\n return(new_df)\r\n\r\n\r\n\r\nurl = 'https://en.wikipedia.org/wiki/Film_series'\r\nhtml = urlopen(url) \r\nsoup = BeautifulSoup(html, 'html.parser')\r\ntables = soup.find_all('table')\r\n\r\n#Create a function to process the string into an integer by using re.sub()\r\ndef process_num(num):\r\n return float(re.sub(r'[^\\w\\s.]','',num))\r\n#test function\r\nnum1 = float(re.sub(r'[^\\w\\s.]','','1,156.30'))\r\n#print(num1)\r\n\r\n#Create array to hold the data extracted\r\ngross=[]\r\nyear=[]\r\nfilm=[]\r\n\r\nfor table in tables:\r\n rows = table.find_all('tr')\r\n \r\n for row in rows:\r\n cells = row.find_all('td')\r\n \r\n if len(cells) > 1:\r\n Franchise = cells[1]\r\n film.append(Franchise.text.strip())\r\n \r\n Gross = cells[6]\r\n gross.append(process_num(Gross.text.strip())) \r\n \r\n first = cells[7]\r\n year.append(int(first.text))\r\n \r\n# put the data in the pandas dataframe \r\nmovie_df= pd.DataFrame({'Gross': gross,\r\n 'first': year,\r\n 'Franchise': film\r\n }) \r\n#print(movie_df) \r\n#print(movie_df.dtypes)\r\n#movies_df_count = movie_df.groupby([\"Franchise\", \"first\"])[\"first\"].count() \r\n#print(movies_df_count)\r\n\r\n#WIKI_df=movie_df.groupby([\"first\"])[\"first\"].count() \r\n#print(WIKI_df)\r\n#WIKI_df.plot(kind='bar',x='first',y='count')\r\n#plt.title(\"Most Movies Release count by Year(Top 68 on WIKI)\",fontsize=20)\r\n\r\n#TMDB Kaggle Data \r\nmovies_TMDB_kaggle= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/tmdb_5000_movies.csv', encoding= 'ISO-8859-1')\r\n#print(len(movies_TMDB_kaggle)) #result 4803 and 20 columns \r\n#print(movies_TMDB_kaggle.isnull().sum()) #tagline and homepage has the most NaN, unnecessary columns\r\n\r\n#Clean the dataframe, removed any unnecessary columns \r\nclean_TMDB_movies= movies_TMDB_kaggle.drop(columns=['homepage', 'id', 'overview', 'status', 'tagline', 'original_title'])\r\n#print(clean_TMDB_movies) #result 4803 rows and 14 columns \r\n#print(clean_TMDB_movies.isnull().sum()) # NaNs in the release_date and runtime column\r\nclean_TMDB_movies.dropna(inplace= True)\r\n#print(clean_TMDB_movies.isnull().sum())\r\n\r\n#Removing any movie that has a budget of 0 \r\nclean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['budget'] != 0]\r\n#Removing any movie with a revenue of 0 \r\nclean_TMDB_movies = clean_TMDB_movies[clean_TMDB_movies['revenue'] != 0]\r\n#review the profit for each movie therefore a profit column was created \r\nclean_TMDB_movies['profit'] = clean_TMDB_movies['revenue'] - clean_TMDB_movies['budget']\r\n#Creating a percent profit column in order to compare profits. 
\r\nclean_TMDB_movies['percent_profit'] = clean_TMDB_movies['profit']/clean_TMDB_movies['budget']*100\r\n#print the top five \r\n#print(clean_TMDB_movies.head())\r\n\r\n#checking the data types \r\n#print(clean_TMDB_movies.dtypes)\r\n\r\n#change release_date to the date/time and separate it by month, day, and year\r\nclean_TMDB_movies['release_date'] = pd.to_datetime(clean_TMDB_movies['release_date'])\r\nclean_TMDB_movies['month'], clean_TMDB_movies['day'] = clean_TMDB_movies['release_date'].dt.month, clean_TMDB_movies['release_date'].dt.day\r\n#After new columns were added it is time to concat. \r\ncat = list(range(1,13))\r\n#Changing the month data type from int to ordered category \r\nclean_TMDB_movies['month'] = pd.Categorical(clean_TMDB_movies['month'], ordered = True, categories = cat)\r\n#confirmation\r\n#print(clean_TMDB_movies.month.dtype)\r\n#print(len(clean_TMDB_movies))\r\n#print(clean_TMDB_movies.describe())\r\n#print(clean_TMDB_movies.revenue.describe())\r\n#print(clean_TMDB_movies.profit.describe())\r\n#print(clean_TMDB_movies.vote_count.describe())\r\n#print(clean_TMDB_movies.percent_profit.describe())\r\n\r\n#discretize the budget column\r\ncategories = [\"very_low\", \"low\", \"high\", \"very_high\"]\r\n#saving the clean_TMDB df as a discretized df \r\nmovies_discretized = clean_TMDB_movies \r\n#creating a budget cutoff using pandas cut function \r\nmovies_discretized[\"budget\"] = pd.cut(movies_discretized[\"budget\"], [0, 13000000, 30000000, 62192550, 400000000], labels = categories)\r\n#repeat the step for revenue \r\n#print(movies_discretized.revenue.describe())\r\nmovies_discretized[\"revenue\"] = pd.cut(movies_discretized[\"revenue\"], [0, 21458200, 62954020, 187976900, 2887965000], labels = categories)\r\n\r\n#profit\r\ncategories_profit = [\"negative\", \"low\", \"high\", \"very_high\"]\r\nmovies_discretized[\"profit\"] = pd.cut(movies_discretized[\"profit\"], [-165710100 , 0, 29314900, 140784100, 2560965000], labels = categories_profit)\r\n#print(movies_discretized[\"profit\"].head())\r\n\r\n#Vote_average-very_low: vote averages less than 6, low are between 6 to 6.5, high between 6.5 and 7 and very_high 7 and 8.5\r\nmovies_discretized[\"vote_average\"] = pd.cut(movies_discretized[\"vote_average\"], [0, 6, 6.5, 7, 8.5], labels = categories)\r\n#print(movies_discretized[\"vote_average\"].head())\r\n\r\n#Vote_count \r\nmovies_discretized[\"vote_count\"] = pd.cut(movies_discretized[\"vote_count\"], [0, 440, 1151, 2522, 14000], labels = categories)\r\n#print(movies_discretized[\"vote_count\"].head())\r\n\r\n#percent_profit \r\nmovies_discretized[\"percent_profit\"] = pd.cut(movies_discretized[\"percent_profit\"], [-100, 0, 108, 436, 6528], labels = categories_profit)\r\nmovies_discretized[\"percent_profit\"]\r\n\r\n#Categorizing days into weeks \r\n#print(movies_discretized.day.describe())\r\ncategories_weeks = [\"week_1\", \"week_2\", \"week_3\", \"week_4\"]\r\n\r\nmovies_discretized[\"week\"] = pd.cut(movies_discretized[\"day\"], [0, 8, 15, 22, 32], labels = categories_weeks)\r\n#print(movies_discretized[\"week\"].head())\r\n\r\n#day and release_date are no longer needed columns \r\nmovies_discretized.drop(columns=['day', 'release_date'], inplace = True)\r\n#print(movies_discretized.head())\r\n\r\n#Do major production companies have an impact the profit margin? 
\r\nproduction_company = []\r\nfor movie in movies_discretized['production_companies']:\r\n if \"Universal\" in movie:\r\n production_company.append(\"Universal\")\r\n elif \"Sony\" in movie: \r\n production_company.append(\"Sony\")\r\n elif \"Fox\" in movie: \r\n production_company.append(\"Fox\")\r\n elif \"DreamWorks\" in movie: \r\n production_company.append(\"DW\")\r\n elif \"MGM\" in movie: \r\n production_company.append(\"MGM\")\r\n elif \"Paramount\" in movie: \r\n production_company.append(\"Paramount\")\r\n elif \"Disney\" in movie: \r\n production_company.append(\"Disney\")\r\n elif \"Warner Bros\" in movie:\r\n production_company.append(\"WB\")\r\n else:\r\n production_company.append(\"None\")\r\n\r\nmovies_discretized[\"main_production\"] = production_company\r\n#print(movies_discretized[\"main_production\"].head())\r\nmovies_discretized_count = movies_discretized.groupby([\"main_production\", \"percent_profit\"])[\"main_production\"].count()\r\nmovies_discretized_count_df= pd.DataFrame(movies_discretized_count)\r\n#print(movies_discretized_count_df)\r\n#change the last column to count instead of main production \r\nmovies_discretized_count_df.columns = [\"counts\"]\r\n#print(movies_discretized_count_df.head())\r\n\r\n#total count for the number of percent_profit counts for each main production.\r\nmovies_discretized_count_df[\"production_company\"]=movies_discretized_count_df.index.get_level_values(0)\r\nmovies_discretized_count_df[\"percent_profit_category\"] = movies_discretized_count_df.index.get_level_values(1)\r\n#print(movies_discretized_count_df)\r\n\r\n#drop the indexes to create another column with the sum of the counts of each production \r\nmovies_discretized_count_df = movies_discretized_count_df.reset_index(drop = True)\r\n#The sum of each production company category. 
\r\nproduction_company_discretized_count_df = movies_discretized_count_df.groupby([\"production_company\"])[\"counts\"].sum()\r\n#print(production_company_discretized_count_df)\r\n\r\n#column with the overall counts for each production, construct a new column called production company count that replicates the production company, and then use the replace function to replace the 1s and 2s with the total count\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company\"] \r\n#Now replacing the income level with the total count for each income level \r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"DW\"], 82)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"Disney\"], 116)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"Fox\"], 298)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"MGM\"], 87)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"None\"], 1782)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"Paramount\"], 235)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"Sony\"], 42)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"Universal\"], 282)\r\nmovies_discretized_count_df[\"production_company_count\"] = movies_discretized_count_df[\"production_company_count\"].replace([\"WB\"], 269)\r\n#print(movies_discretized_count_df)\r\n\r\n#percentage \r\nmovies_discretized_count_df[\"percent\"] = movies_discretized_count_df[\"counts\"]/movies_discretized_count_df[\"production_company_count\"] *100\r\n#print(movies_discretized_count_df.head())\r\n#dropping production_company_count and count column no longer needed \r\nmovies_discretized_count_df.drop([\"counts\", \"production_company_count\"], axis = 1, inplace = True ) \r\n\r\n#graphing question 1 using Matplot lib\r\n#graph = movies_discretized_count_df.pivot(\"production_company\", \"percent_profit_category\",\"percent\").plot(kind=\"bar\", color= ['blue', 'green', 'purple', 'red'], title='Profit Margin amongst Production Companies') \r\n#change the x and y axis for graph\r\n#plt.ylabel(\"Percent Profit\")\r\n#plt.xlabel(\"Production\")\r\n#plt.xticks(rotation = 0)\r\n#position the legends underneath the graph; Now the graph looks beautiful\r\n#plt.legend( loc = \"lower center\", bbox_to_anchor = (.5, -.4), ncol = 4, title = \"Percent Profit Category\")\r\n#plt.show()\r\n\r\n#Question 2: Is it true that the month in which a film is released has an impact on its profit margin?\r\nmovies_discretized_count_week = movies_discretized.groupby([\"week\", \"percent_profit\"])[\"week\"].count()\r\nmovies_discretized_count_df_week = pd.DataFrame(movies_discretized_count_week)\r\n#Checking the dataframe \r\n#print(movies_discretized_count_df_week)\r\n\r\n#changing column that is labeled week to count \r\nmovies_discretized_count_df_week.columns = [\"counts\"]\r\n#total count for the number of % profit for each week 
\r\nmovies_discretized_count_df_week[\"week\"]=movies_discretized_count_df_week.index.get_level_values(0)\r\nmovies_discretized_count_df_week[\"percent_profit_category\"] = movies_discretized_count_df_week.index.get_level_values(1)\r\n#print(movies_discretized_count_df_week)\r\nmovies_discretized_count_df_week = movies_discretized_count_df_week.reset_index(drop = True) #drop the index\r\n#what is the sum of each production \r\nsum_discretized_count_df_week = movies_discretized_count_df_week.groupby([\"week\"])[\"counts\"].sum()\r\n#print(sum_discretized_count_df_week) #the sums are centered around 700-800s \r\nmovies_discretized_count_df_week[\"week_count\"] = movies_discretized_count_df_week[\"week\"] \r\n#Now replacing the income level with the total count for each income level \r\nmovies_discretized_count_df_week[\"week_count\"] = movies_discretized_count_df_week[\"week_count\"].replace([\"week_1\"], 783)\r\nmovies_discretized_count_df_week[\"week_count\"] = movies_discretized_count_df_week[\"week_count\"].replace([\"week_2\"], 817)\r\nmovies_discretized_count_df_week[\"week_count\"] = movies_discretized_count_df_week[\"week_count\"].replace([\"week_3\"], 782)\r\nmovies_discretized_count_df_week[\"week_count\"] = movies_discretized_count_df_week[\"week_count\"].replace([\"week_4\"], 811)\r\n#print(movies_discretized_count_df_week.head())\r\n\r\n#received an error Object with dtype category cannot perform the numpy op true_divide\r\nmovies_discretized_count_df_week[\"week_count\"]= movies_discretized_count_df_week[\"week_count\"].astype(np.int64) \r\n#convert into percentage; counts/week_count * 100 \r\nmovies_discretized_count_df_week[\"percent\"] = movies_discretized_count_df_week[\"counts\"]/movies_discretized_count_df_week[\"week_count\"] *100\r\n#print(movies_discretized_count_df_week.head()) \r\n\r\n#dropping the week_count and count column since the percent column is there those columns are no longer needed \r\nmovies_discretized_count_df_week.drop([\"counts\", \"week_count\"], axis = 1, inplace = True ) \r\n#Time to create a visual \r\n#graph_question_2 = movies_discretized_count_df_week.pivot(\"week\", \"percent_profit_category\", \"percent\").plot(kind=\"bar\", color = [\"blue\", \"green\", \"purple\", \"red\"], title = \"Impact of Percent Profit by Week\")\r\n#plt.ylabel(\"Percent\")\r\n#plt.xlabel(\"Week\")\r\n#plt.xticks(rotation = 0)\r\n#plt.legend( loc = \"lower center\", bbox_to_anchor = (.5, -.4), ncol = 4, title = \"Percent Profit\")\r\n#plt.show()\r\n\r\n\r\n#IMDb Kaggle Data \r\nmovies_IMDb= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/IMDb_movies.csv')\r\nclean_IMDb= movies_IMDb.drop(columns=['imdb_title_id','original_title','description', 'reviews_from_users', 'reviews_from_critics'])\r\n#print(clean_IMDb) #85,855 rows and 17 columns \r\n#print(clean_IMDb.isnull().sum())\r\nclean_IMDb.dropna(inplace = True) #drop all the NaNs \r\n#print(clean_IMDb.isnull().sum()) #no more NaNs\r\n#print(len(clean_IMDb)) #6635\r\n#print(clean_IMDb.dtypes)\r\n\r\n# QUESTION 3: How does budget impact vote average?\r\n#plt.plot(clean_IMDb.budget, clean_IMDb.avg_vote, 'o')\r\n#plt.title('How does Budget Impact Vote Average?')\r\n#plt.xlabel('Budget')\r\n#plt.ylabel('Vote Average')\r\n#plt.show()\r\n\r\n#print(clean_IMDb['budget'].head())\r\n\r\n#print the top five \r\n#print(clean_IMDb.head())\r\n\r\n#Using the groupby_count function that takes the following arguments (df, groupby_column, count_column)\r\nIMDb_movies_genre = groupby_count(clean_IMDb, 'genre', 
'genre')\r\n#Sorting the df, so the bar graph will be in descending order\r\nIMDb_movies_genre.sort_values(['count'], ascending=[False], inplace = True)\r\n\r\n\r\n\r\n#Statista movie theatre revenue and prediction to 2025 post COVID saving to a pd dataframe\r\nrevenue_covid= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/revenue_covid_impact.csv')\r\nprint(revenue_covid)\r\nAMC_revenue= pd.read_csv(r'C:/Users/lewis/OneDrive/Documents/MovieData/AMC.csv')\r\n#print(AMC_revenue)\r\n#print(AMC_revenue.info())\r\nprint(AMC_revenue.head())\r\n\r\n#During 2020, AMC Theatres reported annual revenues of 1.24 billion U.S. dollars, a dramatic decrease from previous years as a consequence of the COVID-19 pandemic.\r\nplt.plot(AMC_revenue.Year, AMC_revenue.Money, 'o')\r\nplt.title('AMC revenue over 15 years')\r\nplt.xlabel('Year')\r\nplt.ylabel('Revenue')\r\nplt.show()\r\n\r\n#Global box office revenue coronavirus impact 2020-2025\r\n#revenue_covid.plot(x=\"Year\", y=[\"Originalforecast\", \"Marchrevision\", \"Julyrevision\"], kind=\"bar\")\r\n#plt.show()\r\n\r\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
username = 'lucychibukhchyan'
api_key = 'sjbEMjfNrrglXY4zCFufIw9IPlZ3SA'
client = TextmagicRestClient(username, api_key)
message = client.message.create(phones='7206337812', text=
'wow i sent a text from python!!!!')
<|reserved_special_token_1|>
from textmagic.rest import TextmagicRestClient
username = 'lucychibukhchyan'
api_key = 'sjbEMjfNrrglXY4zCFufIw9IPlZ3SA'
client = TextmagicRestClient(username, api_key)
message = client.message.create(phones='7206337812', text=
'wow i sent a text from python!!!!')
<|reserved_special_token_1|>
from textmagic.rest import TextmagicRestClient
username = 'lucychibukhchyan'
api_key = 'sjbEMjfNrrglXY4zCFufIw9IPlZ3SA'
client = TextmagicRestClient(username, api_key)
message = client.message.create(phones="7206337812", text="wow i sent a text from python!!!!")
|
flexible
|
{
"blob_id": "1ba39cfc1187b0efc7fc7e905a15de8dc7f80e0d",
"index": 8888,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nusername = 'lucychibukhchyan'\napi_key = 'sjbEMjfNrrglXY4zCFufIw9IPlZ3SA'\nclient = TextmagicRestClient(username, api_key)\nmessage = client.message.create(phones='7206337812', text=\n 'wow i sent a text from python!!!!')\n",
"step-3": "from textmagic.rest import TextmagicRestClient\nusername = 'lucychibukhchyan'\napi_key = 'sjbEMjfNrrglXY4zCFufIw9IPlZ3SA'\nclient = TextmagicRestClient(username, api_key)\nmessage = client.message.create(phones='7206337812', text=\n 'wow i sent a text from python!!!!')\n",
"step-4": "from textmagic.rest import TextmagicRestClient\n\nusername = 'lucychibukhchyan'\napi_key = 'sjbEMjfNrrglXY4zCFufIw9IPlZ3SA'\nclient = TextmagicRestClient(username, api_key)\n\nmessage = client.message.create(phones=\"7206337812\", text=\"wow i sent a text from python!!!!\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#############################################################################
## Crytek Source File
## Copyright (C) 2013, Crytek Studios
##
## Creator: Christopher Bolte
## Date: Oct 31, 2013
## Description: WAF based build system
#############################################################################
from waflib.Configure import conf
def load_linux_x64_common_settings(v):
"""
Setup all compiler and linker settings shared over all linux_x64 configurations
"""
# Add common linux x64 defines
v['DEFINES'] += [ 'LINUX64' ]
@conf
def load_debug_linux_x64_settings(conf):
"""
Setup all compiler and linker settings shared over all linux_x64 configurations for
the 'debug' configuration
"""
v = conf.env
load_linux_x64_common_settings(v)
@conf
def load_profile_linux_x64_settings(conf):
"""
Setup all compiler and linker settings shared over all linux_x64 configurations for
the 'profile' configuration
"""
v = conf.env
load_linux_x64_common_settings(v)
@conf
def load_performance_linux_x64_settings(conf):
"""
Setup all compiler and linker settings shared over all linux_x64 configurations for
the 'performance' configuration
"""
v = conf.env
load_linux_x64_common_settings(v)
@conf
def load_release_linux_x64_settings(conf):
"""
Setup all compiler and linker settings shared over all linux_x64 configurations for
the 'release' configuration
"""
v = conf.env
load_linux_x64_common_settings(v)
|
normal
|
{
"blob_id": "5848273a76995825f01df53d6beed534e6f9f9fe",
"index": 8730,
"step-1": "<mask token>\n\n\n@conf\ndef load_debug_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'debug' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_profile_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'profile' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n<mask token>\n\n\n@conf\ndef load_release_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'release' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n",
"step-2": "<mask token>\n\n\n@conf\ndef load_debug_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'debug' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_profile_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'profile' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_performance_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'performance' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_release_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'release' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n",
"step-3": "<mask token>\n\n\ndef load_linux_x64_common_settings(v):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations\n\t\"\"\"\n v['DEFINES'] += ['LINUX64']\n\n\n@conf\ndef load_debug_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'debug' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_profile_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'profile' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_performance_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'performance' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_release_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'release' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n",
"step-4": "from waflib.Configure import conf\n\n\ndef load_linux_x64_common_settings(v):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations\n\t\"\"\"\n v['DEFINES'] += ['LINUX64']\n\n\n@conf\ndef load_debug_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'debug' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_profile_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'profile' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_performance_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'performance' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n\n\n@conf\ndef load_release_linux_x64_settings(conf):\n \"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'release' configuration\n\t\"\"\"\n v = conf.env\n load_linux_x64_common_settings(v)\n",
"step-5": "#############################################################################\n## Crytek Source File\n## Copyright (C) 2013, Crytek Studios\n##\n## Creator: Christopher Bolte\n## Date: Oct 31, 2013\n## Description: WAF based build system\n#############################################################################\nfrom waflib.Configure import conf\n\ndef load_linux_x64_common_settings(v):\n\t\"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations\n\t\"\"\"\n\t\n\t# Add common linux x64 defines\n\tv['DEFINES'] += [ 'LINUX64' ]\t\n\t\n@conf\ndef load_debug_linux_x64_settings(conf):\n\t\"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'debug' configuration\n\t\"\"\"\n\tv = conf.env\n\tload_linux_x64_common_settings(v)\n\t\n@conf\ndef load_profile_linux_x64_settings(conf):\n\t\"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'profile' configuration\n\t\"\"\"\n\tv = conf.env\n\tload_linux_x64_common_settings(v)\n\t\n@conf\ndef load_performance_linux_x64_settings(conf):\n\t\"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'performance' configuration\n\t\"\"\"\n\tv = conf.env\n\tload_linux_x64_common_settings(v)\n\t\n@conf\ndef load_release_linux_x64_settings(conf):\n\t\"\"\"\n\tSetup all compiler and linker settings shared over all linux_x64 configurations for\n\tthe 'release' configuration\n\t\"\"\"\n\tv = conf.env\n\tload_linux_x64_common_settings(v)\n\t",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
import os
import sys
import pandas as pd
import pickle as pkl
from src.utils import image as im
if __name__ == '__main__':
pickled = True
create_sets = True
normed = False
if len(sys.argv) > 2:
filename = sys.argv[1]
else:
filename = os.path.join(os.path.pardir, os.path.pardir, 'data',
'final_transp_directpkl.pkl')
if os.path.splitext(filename)[1] == '.txt':
iter_csv = pd.read_csv(filename, sep='\t', index_col=0, chunksize=20000
)
df = pd.concat([chunk for chunk in iter_csv])
else:
df = pkl.load(open(filename, 'rb'))
fig = im.plot_genes(df.sample(1000))
fig.savefig(os.path.splitext(filename)[0] + '.png')
|
normal
|
{
"blob_id": "18a17c7326a6ae96f74c843d1a902074b377a6d2",
"index": 2701,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n pickled = True\n create_sets = True\n normed = False\n if len(sys.argv) > 2:\n filename = sys.argv[1]\n else:\n filename = os.path.join(os.path.pardir, os.path.pardir, 'data',\n 'final_transp_directpkl.pkl')\n if os.path.splitext(filename)[1] == '.txt':\n iter_csv = pd.read_csv(filename, sep='\\t', index_col=0, chunksize=20000\n )\n df = pd.concat([chunk for chunk in iter_csv])\n else:\n df = pkl.load(open(filename, 'rb'))\n fig = im.plot_genes(df.sample(1000))\n fig.savefig(os.path.splitext(filename)[0] + '.png')\n",
"step-3": "import os\nimport sys\nimport pandas as pd\nimport pickle as pkl\nfrom src.utils import image as im\nif __name__ == '__main__':\n pickled = True\n create_sets = True\n normed = False\n if len(sys.argv) > 2:\n filename = sys.argv[1]\n else:\n filename = os.path.join(os.path.pardir, os.path.pardir, 'data',\n 'final_transp_directpkl.pkl')\n if os.path.splitext(filename)[1] == '.txt':\n iter_csv = pd.read_csv(filename, sep='\\t', index_col=0, chunksize=20000\n )\n df = pd.concat([chunk for chunk in iter_csv])\n else:\n df = pkl.load(open(filename, 'rb'))\n fig = im.plot_genes(df.sample(1000))\n fig.savefig(os.path.splitext(filename)[0] + '.png')\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def run():
contact_book = ContactBook()
with open('22_agenda/contactos.csv', 'r') as f:
reader = csv.reader(f)
for idx, row in enumerate(reader):
if idx == 0:
continue
else:
contact_book.add(row[0], row[1], row[2])
while True:
comando = input(
"""
Que desea hacer
a. añadir contacto
b. actualizar contacto
c. buscar contacto
d. eliminar contacto
e. listar contacto
f. salir
: """
)
if comando == 'a':
print('añadir contacto')
nombre = input('Escribe el nombre de la persona: ')
telefono = input('Escribe el telefono de la persona: ')
email = input('ingrese el email de la persona: ')
contact_book.add(nombre, telefono, email)
elif comando == 'b':
print('actualizar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.update_menu(nombre)
elif comando == 'c':
print('buscar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.search(nombre)
elif comando == 'd':
print('eliminar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.delete(nombre)
elif comando == 'e':
print('listar contactos')
contact_book.show_all()
elif comando == 'f':
print('salir ')
break
else:
print('opcion no valida')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def run():
contact_book = ContactBook()
with open('22_agenda/contactos.csv', 'r') as f:
reader = csv.reader(f)
for idx, row in enumerate(reader):
if idx == 0:
continue
else:
contact_book.add(row[0], row[1], row[2])
while True:
comando = input(
"""
Que desea hacer
a. añadir contacto
b. actualizar contacto
c. buscar contacto
d. eliminar contacto
e. listar contacto
f. salir
: """
)
if comando == 'a':
print('añadir contacto')
nombre = input('Escribe el nombre de la persona: ')
telefono = input('Escribe el telefono de la persona: ')
email = input('ingrese el email de la persona: ')
contact_book.add(nombre, telefono, email)
elif comando == 'b':
print('actualizar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.update_menu(nombre)
elif comando == 'c':
print('buscar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.search(nombre)
elif comando == 'd':
print('eliminar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.delete(nombre)
elif comando == 'e':
print('listar contactos')
contact_book.show_all()
elif comando == 'f':
print('salir ')
break
else:
print('opcion no valida')
if __name__ == '__main__':
run()
<|reserved_special_token_1|>
from ContactBook import ContactBook
import csv
def run():
contact_book = ContactBook()
with open('22_agenda/contactos.csv', 'r') as f:
reader = csv.reader(f)
for idx, row in enumerate(reader):
if idx == 0:
continue
else:
contact_book.add(row[0], row[1], row[2])
while True:
comando = input(
"""
Que desea hacer
a. añadir contacto
b. actualizar contacto
c. buscar contacto
d. eliminar contacto
e. listar contacto
f. salir
: """
)
if comando == 'a':
print('añadir contacto')
nombre = input('Escribe el nombre de la persona: ')
telefono = input('Escribe el telefono de la persona: ')
email = input('ingrese el email de la persona: ')
contact_book.add(nombre, telefono, email)
elif comando == 'b':
print('actualizar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.update_menu(nombre)
elif comando == 'c':
print('buscar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.search(nombre)
elif comando == 'd':
print('eliminar contacto')
nombre = input('Escribe el nombre de la persona: ')
contact_book.delete(nombre)
elif comando == 'e':
print('listar contactos')
contact_book.show_all()
elif comando == 'f':
print('salir ')
break
else:
print('opcion no valida')
if __name__ == '__main__':
run()
<|reserved_special_token_1|>
from ContactBook import ContactBook
import csv
def run():
contact_book = ContactBook()
with open("22_agenda/contactos.csv",'r') as f:
reader = csv.reader(f)
for idx,row in enumerate(reader):
if idx == 0:
continue
else:
contact_book.add(row[0],row[1],row[2])
while True:
comando = input('''
Que desea hacer
a. añadir contacto
b. actualizar contacto
c. buscar contacto
d. eliminar contacto
e. listar contacto
f. salir
: ''')
if comando == 'a':
print("añadir contacto")
nombre = input("Escribe el nombre de la persona: ")
telefono = input("Escribe el telefono de la persona: ")
email = input("ingrese el email de la persona: ")
contact_book.add(nombre,telefono,email)
elif comando == 'b':
print("actualizar contacto")
nombre = input("Escribe el nombre de la persona: ")
contact_book.update_menu(nombre)
elif comando == 'c':
print("buscar contacto")
nombre = input("Escribe el nombre de la persona: ")
contact_book.search(nombre)
elif comando == 'd':
print("eliminar contacto")
nombre = input("Escribe el nombre de la persona: ")
contact_book.delete(nombre)
elif comando == 'e':
print("listar contactos")
contact_book.show_all()
elif comando == 'f':
print("salir ")
break
else:
print("opcion no valida")
if __name__ == "__main__":
run()
|
flexible
|
{
"blob_id": "f5831b84c1177d8b869db05d332bd364b3f72fff",
"index": 4282,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef run():\n contact_book = ContactBook()\n with open('22_agenda/contactos.csv', 'r') as f:\n reader = csv.reader(f)\n for idx, row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0], row[1], row[2])\n while True:\n comando = input(\n \"\"\"\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : \"\"\"\n )\n if comando == 'a':\n print('añadir contacto')\n nombre = input('Escribe el nombre de la persona: ')\n telefono = input('Escribe el telefono de la persona: ')\n email = input('ingrese el email de la persona: ')\n contact_book.add(nombre, telefono, email)\n elif comando == 'b':\n print('actualizar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.update_menu(nombre)\n elif comando == 'c':\n print('buscar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.search(nombre)\n elif comando == 'd':\n print('eliminar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.delete(nombre)\n elif comando == 'e':\n print('listar contactos')\n contact_book.show_all()\n elif comando == 'f':\n print('salir ')\n break\n else:\n print('opcion no valida')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef run():\n contact_book = ContactBook()\n with open('22_agenda/contactos.csv', 'r') as f:\n reader = csv.reader(f)\n for idx, row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0], row[1], row[2])\n while True:\n comando = input(\n \"\"\"\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : \"\"\"\n )\n if comando == 'a':\n print('añadir contacto')\n nombre = input('Escribe el nombre de la persona: ')\n telefono = input('Escribe el telefono de la persona: ')\n email = input('ingrese el email de la persona: ')\n contact_book.add(nombre, telefono, email)\n elif comando == 'b':\n print('actualizar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.update_menu(nombre)\n elif comando == 'c':\n print('buscar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.search(nombre)\n elif comando == 'd':\n print('eliminar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.delete(nombre)\n elif comando == 'e':\n print('listar contactos')\n contact_book.show_all()\n elif comando == 'f':\n print('salir ')\n break\n else:\n print('opcion no valida')\n\n\nif __name__ == '__main__':\n run()\n",
"step-4": "from ContactBook import ContactBook\nimport csv\n\n\ndef run():\n contact_book = ContactBook()\n with open('22_agenda/contactos.csv', 'r') as f:\n reader = csv.reader(f)\n for idx, row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0], row[1], row[2])\n while True:\n comando = input(\n \"\"\"\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : \"\"\"\n )\n if comando == 'a':\n print('añadir contacto')\n nombre = input('Escribe el nombre de la persona: ')\n telefono = input('Escribe el telefono de la persona: ')\n email = input('ingrese el email de la persona: ')\n contact_book.add(nombre, telefono, email)\n elif comando == 'b':\n print('actualizar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.update_menu(nombre)\n elif comando == 'c':\n print('buscar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.search(nombre)\n elif comando == 'd':\n print('eliminar contacto')\n nombre = input('Escribe el nombre de la persona: ')\n contact_book.delete(nombre)\n elif comando == 'e':\n print('listar contactos')\n contact_book.show_all()\n elif comando == 'f':\n print('salir ')\n break\n else:\n print('opcion no valida')\n\n\nif __name__ == '__main__':\n run()\n",
"step-5": "from ContactBook import ContactBook\nimport csv\n\ndef run(): \n\n contact_book = ContactBook()\n \n with open(\"22_agenda/contactos.csv\",'r') as f:\n reader = csv.reader(f)\n for idx,row in enumerate(reader):\n if idx == 0:\n continue\n else:\n contact_book.add(row[0],row[1],row[2])\n\n\n while True:\n comando = input('''\n Que desea hacer\n a. añadir contacto\n b. actualizar contacto\n c. buscar contacto\n d. eliminar contacto\n e. listar contacto\n f. salir \n : ''')\n\n if comando == 'a':\n print(\"añadir contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n telefono = input(\"Escribe el telefono de la persona: \")\n email = input(\"ingrese el email de la persona: \")\n\n contact_book.add(nombre,telefono,email)\n\n elif comando == 'b':\n print(\"actualizar contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n contact_book.update_menu(nombre)\n\n elif comando == 'c':\n print(\"buscar contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n contact_book.search(nombre)\n\n elif comando == 'd':\n print(\"eliminar contacto\")\n nombre = input(\"Escribe el nombre de la persona: \")\n contact_book.delete(nombre)\n\n elif comando == 'e':\n print(\"listar contactos\")\n contact_book.show_all()\n\n elif comando == 'f':\n print(\"salir \")\n break\n\n else:\n print(\"opcion no valida\")\n\nif __name__ == \"__main__\":\n run()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#!/usr/bin/env python
# including libraries
import roslib
import sys
import rospy
import cv2
import math
from std_msgs.msg import String
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import numpy as np
import matplotlib.pyplot as plt
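# 20x20 occupancy grid of the labyrinth used by the A* planner below: 1 = free cell, 0 = wall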
MAP = np.array([[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,0],[0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0],[0,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0],[0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0],[0,1,0,1,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])
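# last detected ball position, reused when the ball is not found in a frame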
position_history = (0,0)
class labyrinth_solver:
def __init__(self):
self.image_pub = rospy.Publisher("final_image",Image)
self.bridge = CvBridge()
self.image_sub = rospy.Subscriber("/usb_cam/image_raw",Image,self.callback)
def callback(self,data):
try:
cv_image = self.bridge.imgmsg_to_cv2(data, desired_encoding="bgr8")
		except CvBridgeError as e:
			print(e)
# crop out the labyrinth region (y by x)
cv_image = cv_image[22:240, 44:268]
		# resize the image to 400x400 so that each grid cell is 20x20
cv_image = cv2.resize(cv_image, (400, 400))
		# convert the image from BGR to HSV
hsv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)
# Red Ball Segmentation
lower_red = np.array([0,50,150])
upper_red = np.array([50,150,250])
temp_ball = cv2.inRange(hsv_image,lower_red,upper_red)
# Erosion and Dilation processing
kernel = np.ones((3,3),np.uint8)
temp_ball = cv2.dilate(temp_ball,kernel,iterations = 2)
#cv2.imshow("Red Ball", temp_ball)
# Calculate the contour
		contours,hierarchy = cv2.findContours(temp_ball,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
		# Select the biggest contour as the target
max_area = 0
for cnt in contours:
area=cv2.contourArea(cnt)
if area > max_area:
max_area=area
target = cnt
		global position_history  # module-level state shared across frames
		# handle the case where the target was not detected
if max_area >= 10:
(x,y),radius = cv2.minEnclosingCircle(target)
center = (int(x),int(y))
else:
center = position_history
		# use a fixed radius and ignore small jitter in the detected position
radius = 10
if abs(center[0]-position_history[0])+abs(center[1]-position_history[1])<=4:
center = position_history
cv2.circle(cv_image,center,radius,(0,255,0),2)
position_history = center
cv2.imshow("Ball tracking", cv_image)
		# snap the detected centre onto the 20x20 grid
		# FIRST decide whether x or y is closer to a grid line
checkx = center[0]%20-10
checky = center[1]%20-15
if abs(checkx) <= abs(checky):
newx = center[0] - checkx
newy = center[1]*0.955
elif abs(checkx) > abs(checky):
newx = center[0]
newy = 0.955*(center[1] - checky)
newcenter = (newx, int(newy))
# read the reference map for animation
map_ref = cv2.imread('/home/sunyue/catkin_ws/src/tracking/map.png')
cv2.circle(map_ref,newcenter,radius,(0,0,255),-5)
# SECOND transfer the real location to the 20x20 grid
gridx = newcenter[0]/20+1
gridy = newcenter[1]/20+1
# A* for path planning
goal = [10,2]
current = [gridx, gridy]
precheck = abs(current[0]-goal[0])+abs(current[1]-goal[1])
if precheck == 0: check = 0
else: check = 100
path = np.array([current])
backup = np.array([[0,0,0,0]])
while check!=0:
# generate the potential candidate
north = [current[0],current[1]-1]
south = [current[0],current[1]+1]
east = [current[0]+1,current[1]]
west = [current[0]-1,current[1]]
#print current
# calculate the heuristic
n_heuristic = math.sqrt(pow(north[0]-goal[0],2)+pow(north[1]-goal[1],2))
s_heuristic = math.sqrt(pow(south[0]-goal[0],2)+pow(south[1]-goal[1],2))
e_heuristic = math.sqrt(pow(east[0]-goal[0],2)+pow(east[1]-goal[1],2))
w_heuristic = math.sqrt(pow(west[0]-goal[0],2)+pow(west[1]-goal[1],2))
# check the punishment of obstacle
if MAP[north[1]-1,north[0]-1]==0: n_punish = 2000
else: n_punish = 0
if MAP[south[1]-1,south[0]-1]==0: s_punish = 2000
else: s_punish = 0
if MAP[east[1]-1,east[0]-1]==0: e_punish = 2000
else: e_punish = 0
if MAP[west[1]-1,west[0]-1]==0: w_punish = 2000
else: w_punish = 0
#print n_punish, s_punish, e_punish, w_punish
			# never step back onto the previous node of the path
num = path.shape[0] # get the path step number
if num!=1:
last_step = path[-2]
n_check = north - last_step
s_check = south - last_step
e_check = east - last_step
w_check = west - last_step
if ( n_check[0]==0 and n_check[1]==0): n_punish = 2000
if ( s_check[0]==0 and s_check[1]==0): s_punish = 2000
if ( e_check[0]==0 and e_check[1]==0): e_punish = 2000
if ( w_check[0]==0 and w_check[1]==0): w_punish = 2000
# sum the cost together
n_cost = int(n_heuristic + n_punish)
s_cost = int(s_heuristic + s_punish)
e_cost = int(e_heuristic + e_punish)
w_cost = int(w_heuristic + w_punish)
cost = [n_cost, s_cost, e_cost, w_cost]
			# several situations have to be handled depending on how many directions stay open
index = np.argmin(cost) # where the smallest cost is located
mincost = cost[index]
			# pick a neighbour directly whenever at least one direction costs less than 1000
if mincost<=1000: # there must be at least one solution
sumcheck = cost[0]+cost[1]+cost[2]+cost[3]
if sumcheck >= 6000: # only one next choice
if index == 0: next = north
elif index == 1: next = south
elif index == 2: next = east
elif index == 3: next = west
# update the path
path = np.append(path,[next],axis=0)
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
					# update the current node
current = next
				elif (sumcheck >= 4000 and sumcheck < 6000): # two possible choices
if index == 0: next = north
elif index == 1: next = south
elif index == 2: next = east
elif index == 3: next = west
					# update the path with the lowest-cost choice
path = np.append(path,[next],axis=0)
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
# save the branch to the back up [current, branch]
fakecost = cost
					fakecost[index] = 2000  # manually fake the minimum cost choice
fakeindex = np.argmin(fakecost) # where the smallest cost is located
if fakeindex == 0: branch = north
elif fakeindex == 1: branch = south
elif fakeindex == 2: branch = east
elif fakeindex == 3: branch = west
backup = np.append([[current[0],current[1],branch[0],branch[1]]], backup, axis=0)
					# update the current node
current = next
				elif (sumcheck >= 2000 and sumcheck < 4000): # three possible choices
if index == 0: next = north
elif index == 1: next = south
elif index == 2: next = east
elif index == 3: next = west
					# update the path with the lowest-cost choice
path = np.append(path,[next],axis=0)
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
# save the branch to the back up [current, branch]
# second cost
secondcost = cost
					secondcost[index] = 2000  # manually fake the minimum cost choice
secondindex = np.argmin(secondcost) # where the smallest cost is located
if secondindex == 0: branch1 = north
elif secondindex == 1: branch1 = south
elif secondindex == 2: branch1 = east
elif secondindex == 3: branch1 = west
thirdcost = secondcost
					thirdcost[secondindex] = 2000  # manually fake the minimum cost choice
thirdindex = np.argmin(thirdcost) # where the smallest cost is located
if thirdindex == 0: branch2 = north
elif thirdindex == 1: branch2 = south
elif thirdindex == 2: branch2 = east
elif thirdindex == 3: branch2 = west
# update branch based on cost difference
backup = np.append([[current[0],current[1],branch2[0],branch2[1]]], backup, axis=0)
backup = np.append([[current[0],current[1],branch1[0],branch1[1]]], backup, axis=0)
					# update the current node
current = next
			elif mincost>=2000: # no open neighbour is left, so backtrack to a saved branch
# next step is the first ranking branch
next = [backup[0,2],backup[0,3]]
# cut the path back
current = [backup[0,0],backup[0,1]]
compare = abs(path-current)
summation = sum(np.transpose(compare))
index = np.argmin(summation)
# cut the path from 0 to current one
path = path[:index+1]
# update the path with next step
path = np.append(path,[next],axis=0)
# delete the first backup
backup = backup[1:]
# update the check for next while
precheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])
if precheck == 0:
check = 0
				# update the current node
current = next
		# A* search finished
steps = path.shape[0]
i = 0
while i < steps-1:
cv2.line(map_ref,(20*path[i,0]-10,20*path[i,1]-10),(20*path[i+1,0]-10,20*path[i+1,1]-10),(255,0,0),3)
i = i+1
cv2.imshow("Map Image", map_ref)
cv2.waitKey(1)
try:
self.image_pub.publish(self.bridge.cv2_to_imgmsg(cv_image, encoding="bgr8"))
		except CvBridgeError as e:
			print(e)
def main(args):
ic = labyrinth_solver()
rospy.init_node('labyrinth_solver', anonymous=True)
try:
rospy.spin()
except KeyboardInterrupt:
print "Shutting down"
cv2.destroyAllWindows()
if __name__ == '__main__':
main(sys.argv)
|
normal
|
{
"blob_id": "b30e6af035b589d5f4bd1bc6cccdd53c157861a0",
"index": 2144,
"step-1": "#!/usr/bin/env python\n\n# including libraries\nimport roslib\nimport sys\nimport rospy\nimport cv2\nimport math\nfrom std_msgs.msg import String\nfrom sensor_msgs.msg import Image\nfrom cv_bridge import CvBridge, CvBridgeError\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\nMAP = np.array([[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],[0,1,1,1,0,1,1,1,1,1,0,1,1,1,1,1,0,1,1,0],[0,1,0,1,0,1,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,0,0,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,1,0],[0,1,1,1,0,1,0,1,1,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0],[0,1,0,1,0,1,0,1,0,1,1,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,1,0,1,0,1,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,0,1,0,1,0,1,1,1,0,1,1,1,1,0],[0,1,0,0,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0],[0,1,1,1,1,1,0,1,1,1,0,1,0,1,1,1,1,1,1,0],[0,1,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0],[0,1,0,1,1,1,1,1,1,0,1,1,1,0,1,1,1,0,1,0],[0,1,0,1,0,0,0,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,0,0,1,0,1,0,1,0,1,0,1,0,1,0,1,0],[0,1,0,1,1,1,1,0,1,1,1,0,1,1,1,0,1,1,1,0],[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]])\n\nposition_history = (0,0)\n\n\nclass labyrinth_solver:\n\n\tdef __init__(self):\n\t\tself.image_pub = rospy.Publisher(\"final_image\",Image)\n\t\tself.bridge = CvBridge()\n\t\tself.image_sub = rospy.Subscriber(\"/usb_cam/image_raw\",Image,self.callback)\n\n\tdef callback(self,data):\n\t\ttry:\n\t\t\tcv_image = self.bridge.imgmsg_to_cv2(data, desired_encoding=\"bgr8\")\n\t\texcept CvBridgeError, e:\n\t\t\tprint e\n\t\t\n\t\t# crop out the labyrinth region (y by x)\n\t\tcv_image = cv_image[22:240, 44:268]\n\t\t# resize the image to 200x200 each region is 10x10\n\t\tcv_image = cv2.resize(cv_image, (400, 400)) \n\t\t# transfer the image from RGB to HSV\n\t\thsv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)\n\n\t\t# Red Ball Segmentation\n\t\tlower_red = np.array([0,50,150])\n\t\tupper_red = np.array([50,150,250])\n\t\ttemp_ball = cv2.inRange(hsv_image,lower_red,upper_red)\n\t\t# Erosion and Dilation processing\n\t\tkernel = np.ones((3,3),np.uint8)\n\t\ttemp_ball = cv2.dilate(temp_ball,kernel,iterations = 2)\n\t\t#cv2.imshow(\"Red Ball\", temp_ball)\n\t\t# Calculate the contour\n\t\tcontours,hierarcy = cv2.findContours(temp_ball,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\n\t\t# Select the biggest contout as the target\t\t\n\t\tmax_area = 0\n\t\tfor cnt in contours:\n\t\t\tarea=cv2.contourArea(cnt)\n\t\t\tif area > max_area:\n\t\t\t\tmax_area=area\n\t\t\t\ttarget = cnt\n\n\t\t\n\t\tglobal position_history # calling global variable\n\t\t# handling with target missing\n\t\tif max_area >= 10:\n\t\t\t(x,y),radius = cv2.minEnclosingCircle(target)\n\t\t\tcenter = (int(x),int(y))\n\t\telse:\n\t\t\tcenter = position_history\n\t\t# Compensate with some noise\n\t\tradius = 10\n\t\tif abs(center[0]-position_history[0])+abs(center[1]-position_history[1])<=4:\n\t\t\tcenter = position_history\n\t\tcv2.circle(cv_image,center,radius,(0,255,0),2)\n\t\tposition_history = center\n\t\tcv2.imshow(\"Ball tracking\", cv_image)\n\t\t\n\n\t\t# manipulate the center coordinate to be the nearest 10 while extract the position in 20 by 20\t\t\n\t\t# FIRST check who is more close to 0\t\n\t\tcheckx = center[0]%20-10\n\t\tchecky = center[1]%20-15\n\t\tif abs(checkx) <= abs(checky):\n\t\t\tnewx = center[0] - checkx\n\t\t\tnewy = center[1]*0.955\n\t\telif abs(checkx) > abs(checky):\n\t\t\tnewx = center[0]\n\t\t\tnewy = 0.955*(center[1] - checky) \n\t\tnewcenter = (newx, int(newy))\n\t\t# read 
the reference map for animation\t\n\t\tmap_ref = cv2.imread('/home/sunyue/catkin_ws/src/tracking/map.png')\n\t\tcv2.circle(map_ref,newcenter,radius,(0,0,255),-5)\n\t\t\n\t\t# SECOND transfer the real location to the 20x20 grid\n\t\tgridx = newcenter[0]/20+1\n\t\tgridy = newcenter[1]/20+1\n\n\n\n\t\t# A* for path planning\n\t\tgoal = [10,2]\n\t\tcurrent = [gridx, gridy]\n\t\tprecheck = abs(current[0]-goal[0])+abs(current[1]-goal[1])\n\t\tif precheck == 0: check = 0\n\t\telse: check = 100\n\t\tpath = np.array([current])\n\t\tbackup = np.array([[0,0,0,0]])\n\n\n\t\twhile check!=0:\n\t\t\t# generate the potential candidate\n\t\t\tnorth = [current[0],current[1]-1]\n\t\t\tsouth = [current[0],current[1]+1]\n\t\t\teast = [current[0]+1,current[1]]\n\t\t\twest = [current[0]-1,current[1]]\n\n\t\t\t#print current\n\n\t\t\t# calculate the heuristic\n\t\t\tn_heuristic = math.sqrt(pow(north[0]-goal[0],2)+pow(north[1]-goal[1],2))\n\t\t\ts_heuristic = math.sqrt(pow(south[0]-goal[0],2)+pow(south[1]-goal[1],2))\n\t\t\te_heuristic = math.sqrt(pow(east[0]-goal[0],2)+pow(east[1]-goal[1],2))\n\t\t\tw_heuristic = math.sqrt(pow(west[0]-goal[0],2)+pow(west[1]-goal[1],2))\n\n\t\t\t# check the punishment of obstacle\n\t\t\tif MAP[north[1]-1,north[0]-1]==0: n_punish = 2000\n\t\t\telse: n_punish = 0\n\t\t\tif MAP[south[1]-1,south[0]-1]==0: s_punish = 2000\n\t\t\telse: s_punish = 0\n\t\t\tif MAP[east[1]-1,east[0]-1]==0: e_punish = 2000\n\t\t\telse: e_punish = 0\n\t\t\tif MAP[west[1]-1,west[0]-1]==0: w_punish = 2000\n\t\t\telse: w_punish = 0\n\n\t\t\t#print n_punish, s_punish, e_punish, w_punish\n\t\t\t# check last node never go back\n\t\t\tnum = path.shape[0] # get the path step number\n\t\t\tif num!=1:\n\t\t\t\tlast_step = path[-2]\n\t\t\t\tn_check = north - last_step\n\t\t\t\ts_check = south - last_step\n\t\t\t\te_check = east - last_step\n\t\t\t\tw_check = west - last_step\n\t\t\t\tif ( n_check[0]==0 and n_check[1]==0): n_punish = 2000\n\t\t\t\tif ( s_check[0]==0 and s_check[1]==0): s_punish = 2000\n\t\t\t\tif ( e_check[0]==0 and e_check[1]==0): e_punish = 2000\n\t\t\t\tif ( w_check[0]==0 and w_check[1]==0): w_punish = 2000\n\n\t\t\t# sum the cost together\n\t\t\tn_cost = int(n_heuristic + n_punish)\n\t\t\ts_cost = int(s_heuristic + s_punish)\n\t\t\te_cost = int(e_heuristic + e_punish)\n\t\t\tw_cost = int(w_heuristic + w_punish)\n\t\t\tcost = [n_cost, s_cost, e_cost, w_cost]\n\n\n\t\t\t# there will be some situations should be taken into consideration\n\t\t\tindex = np.argmin(cost) # where the smallest cost is located\n\t\t\tmincost = cost[index]\n\t\t\t# First only one direction cost is less than 1000, then just pick that\n\t\t\tif mincost<=1000: # there must be at least one solution\n\t\t\t\tsumcheck = cost[0]+cost[1]+cost[2]+cost[3]\n\t\t\t\tif sumcheck >= 6000: # only one next choice\n\t\t\t\t\tif index == 0: next = north\n\t\t\t\t\telif index == 1: next = south\n\t\t\t\t\telif index == 2: next = east\n\t\t\t\t\telif index == 3: next = west\n\t\t\t\t\t# update the path\n\t\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t\t# update the check for next while\n\t\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\t\tif precheck == 0:\n\t\t\t\t\t\tcheck = 0\n\t\t\t\t\t# updat the current\n\t\t\t\t\tcurrent = next\n\n\t\t\t\telif (sumcheck >= 4000 and sumcheck < 6000) : # two posible choices\n\t\t\t\t\tif index == 0: next = north\n\t\t\t\t\telif index == 1: next = south\n\t\t\t\t\telif index == 2: next = east\n\t\t\t\t\telif index == 3: next = west\n\t\t\t\t\t# update the path choose the one have 
the least cost\n\t\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t\t# update the check for next while\n\t\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\t\tif precheck == 0:\n\t\t\t\t\t\tcheck = 0\n\t\t\t\t\t# save the branch to the back up [current, branch]\n\t\t\t\t\tfakecost = cost\n\t\t\t\t\tfakecost[index] = 2000\t# mannually fake the minimum cost choice\n\t\t\t\t\tfakeindex = np.argmin(fakecost) # where the smallest cost is located\n\t\t\t\t\tif fakeindex == 0: branch = north\n\t\t\t\t\telif fakeindex == 1: branch = south\n\t\t\t\t\telif fakeindex == 2: branch = east\n\t\t\t\t\telif fakeindex == 3: branch = west\n\t\t\t\t\tbackup = np.append([[current[0],current[1],branch[0],branch[1]]], backup, axis=0)\n\t\t\t\t\t# updat the current\n\t\t\t\t\tcurrent = next\n\n\t\t\t\telif (sumcheck >= 2000 and sumcheck < 4000) : # three posible choices\n\t\t\t\t\tif index == 0: next = north\n\t\t\t\t\telif index == 1: next = south\n\t\t\t\t\telif index == 2: next = east\n\t\t\t\t\telif index == 3: next = west\n\t\t\t\t\t# update the path choose the one have the least cost\n\t\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t\t# update the check for next while\n\t\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\t\tif precheck == 0:\n\t\t\t\t\t\tcheck = 0\n\t\t\t\t\t# save the branch to the back up [current, branch]\n\t\t\t\t\t# second cost\n\t\t\t\t\tsecondcost = cost\n\t\t\t\t\tsecondcost[index] = 2000\t# mannually fake the minimum cost choice\n\t\t\t\t\tsecondindex = np.argmin(secondcost) # where the smallest cost is located\n\t\t\t\t\tif secondindex == 0: branch1 = north\n\t\t\t\t\telif secondindex == 1: branch1 = south\n\t\t\t\t\telif secondindex == 2: branch1 = east\n\t\t\t\t\telif secondindex == 3: branch1 = west\n\n\t\t\t\t\tthirdcost = secondcost\n\t\t\t\t\tthirdcost[secondindex] = 2000\t# mannually fake the minimum cost choice\n\t\t\t\t\tthirdindex = np.argmin(thirdcost) # where the smallest cost is located\n\t\t\t\t\tif thirdindex == 0: branch2 = north\n\t\t\t\t\telif thirdindex == 1: branch2 = south\n\t\t\t\t\telif thirdindex == 2: branch2 = east\n\t\t\t\t\telif thirdindex == 3: branch2 = west\n\t\t\t\t\t# update branch based on cost difference\n\t\t\t\t\tbackup = np.append([[current[0],current[1],branch2[0],branch2[1]]], backup, axis=0)\n\t\t\t\t\tbackup = np.append([[current[0],current[1],branch1[0],branch1[1]]], backup, axis=0)\n\t\t\t\t\t# updat the current\n\t\t\t\t\tcurrent = next\n\n\n\n\t\t\telif mincost>=2000: # there is no next choice we have go to backup branchs\n\t\t\t\t# next step is the first ranking branch\t\t\t\t\n\t\t\t\tnext = [backup[0,2],backup[0,3]]\n\t\t\t\t# cut the path back\n\t\t\t\tcurrent = [backup[0,0],backup[0,1]]\n\t\t\t\tcompare = abs(path-current)\n\t\t\t\tsummation = sum(np.transpose(compare))\n\t\t\t\tindex = np.argmin(summation)\n\t\t\t\t# cut the path from 0 to current one\n\t\t\t\tpath = path[:index+1]\n\t\t\t\t# update the path with next step\n\t\t\t\tpath = np.append(path,[next],axis=0)\n\t\t\t\t# delete the first backup\n\t\t\t\tbackup = backup[1:]\n\t\t\t\t# update the check for next while\n\t\t\t\tprecheck = abs(next[0]-goal[0])+abs(next[1]-goal[1])\n\t\t\t\tif precheck == 0:\n\t\t\t\t\tcheck = 0\n\t\t\t\t# updat the current\n\t\t\t\tcurrent = next\n\t\t\n\t\t# A* algorithm is ended\n\n\t\tsteps = path.shape[0]\n\t\ti = 0\n\t\twhile i < steps-1:\n\t\t\tcv2.line(map_ref,(20*path[i,0]-10,20*path[i,1]-10),(20*path[i+1,0]-10,20*path[i+1,1]-10),(255,0,0),3)\n\t\t\ti = i+1\n\n\t\tcv2.imshow(\"Map 
Image\", map_ref)\n\n\t\tcv2.waitKey(1)\n\n\t\ttry:\n\t\t\tself.image_pub.publish(self.bridge.cv2_to_imgmsg(cv_image, encoding=\"bgr8\"))\n\t\texcept CvBridgeError, e:\n\t\t\tprint e\n\ndef main(args):\n\tic = labyrinth_solver()\n\trospy.init_node('labyrinth_solver', anonymous=True)\n\ttry:\n\t\trospy.spin()\n\texcept KeyboardInterrupt:\n\t\tprint \"Shutting down\"\n\tcv2.destroyAllWindows()\n \nif __name__ == '__main__':\n\t\tmain(sys.argv)\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import pymongo
import pandas as pd
from scrape_mars import scrape
import json
# Create connection variable
conn = 'mongodb://localhost:27017'
# Pass connection to the pymongo instance.
client = pymongo.MongoClient(conn)
# Connect to a database. Will create one if not already available.
db = client.mars_db
#News
url = "https://mars.nasa.gov/news/?page=0&per_page=40&order=publish_date+desc%2Ccreated_at+desc&search=&category=19%2C165%2C184%2C204&blank_scope=Latest"
info = "News"
# Drops collection if available to remove duplicates
db.news.drop()
#Insert info
#print(scrape(info,url))
db.news.insert_many(scrape(info,url))
#Images
url = "https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars"
info = "Images"
# Drops collection if available to remove duplicates
db.images.drop()
#Insert info
#print(scrape(info,url))
db.images.insert_many(scrape(info,url))
#Weather
url = "https://twitter.com/marswxreport?lang=en"
info = "Weather"
# Drops collection if available to remove duplicates
db.weather.drop()
#Insert info
#print(scrape(info,url))
db.weather.insert_many(scrape(info,url))
#Facts
url = "https://space-facts.com/mars/"
info = "Facts"
# Drops collection if available to remove duplicates
db.facts.drop()
#Insert info
#print(scrape(info,url))
df = pd.DataFrame(scrape(info,url))
df_json = df.to_json()
df_json_list = json.loads(df_json).values()
db.facts.insert(df_json_list)
#Hemispheres
url = "https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars"
info = "Hemis"
# Drops collection if available to remove duplicates
db.hemis.drop()
#Insert info
#print(scrape(info,url))
db.hemis.insert_many(scrape(info,url))
|
normal
|
{
"blob_id": "e3ac8039ffb6787b0e3e80b234c2689c66a184bf",
"index": 1704,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndb.news.drop()\ndb.news.insert_many(scrape(info, url))\n<mask token>\ndb.images.drop()\ndb.images.insert_many(scrape(info, url))\n<mask token>\ndb.weather.drop()\ndb.weather.insert_many(scrape(info, url))\n<mask token>\ndb.facts.drop()\n<mask token>\ndb.facts.insert(df_json_list)\n<mask token>\ndb.hemis.drop()\ndb.hemis.insert_many(scrape(info, url))\n",
"step-3": "<mask token>\nconn = 'mongodb://localhost:27017'\nclient = pymongo.MongoClient(conn)\ndb = client.mars_db\nurl = (\n 'https://mars.nasa.gov/news/?page=0&per_page=40&order=publish_date+desc%2Ccreated_at+desc&search=&category=19%2C165%2C184%2C204&blank_scope=Latest'\n )\ninfo = 'News'\ndb.news.drop()\ndb.news.insert_many(scrape(info, url))\nurl = 'https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars'\ninfo = 'Images'\ndb.images.drop()\ndb.images.insert_many(scrape(info, url))\nurl = 'https://twitter.com/marswxreport?lang=en'\ninfo = 'Weather'\ndb.weather.drop()\ndb.weather.insert_many(scrape(info, url))\nurl = 'https://space-facts.com/mars/'\ninfo = 'Facts'\ndb.facts.drop()\ndf = pd.DataFrame(scrape(info, url))\ndf_json = df.to_json()\ndf_json_list = json.loads(df_json).values()\ndb.facts.insert(df_json_list)\nurl = (\n 'https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars'\n )\ninfo = 'Hemis'\ndb.hemis.drop()\ndb.hemis.insert_many(scrape(info, url))\n",
"step-4": "import pymongo\nimport pandas as pd\nfrom scrape_mars import scrape\nimport json\nconn = 'mongodb://localhost:27017'\nclient = pymongo.MongoClient(conn)\ndb = client.mars_db\nurl = (\n 'https://mars.nasa.gov/news/?page=0&per_page=40&order=publish_date+desc%2Ccreated_at+desc&search=&category=19%2C165%2C184%2C204&blank_scope=Latest'\n )\ninfo = 'News'\ndb.news.drop()\ndb.news.insert_many(scrape(info, url))\nurl = 'https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars'\ninfo = 'Images'\ndb.images.drop()\ndb.images.insert_many(scrape(info, url))\nurl = 'https://twitter.com/marswxreport?lang=en'\ninfo = 'Weather'\ndb.weather.drop()\ndb.weather.insert_many(scrape(info, url))\nurl = 'https://space-facts.com/mars/'\ninfo = 'Facts'\ndb.facts.drop()\ndf = pd.DataFrame(scrape(info, url))\ndf_json = df.to_json()\ndf_json_list = json.loads(df_json).values()\ndb.facts.insert(df_json_list)\nurl = (\n 'https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars'\n )\ninfo = 'Hemis'\ndb.hemis.drop()\ndb.hemis.insert_many(scrape(info, url))\n",
"step-5": "import pymongo\nimport pandas as pd\nfrom scrape_mars import scrape\nimport json\n\n\n# Create connection variable\nconn = 'mongodb://localhost:27017'\n\n\n# Pass connection to the pymongo instance.\nclient = pymongo.MongoClient(conn)\n\n# Connect to a database. Will create one if not already available.\ndb = client.mars_db\n\n\n#News\nurl = \"https://mars.nasa.gov/news/?page=0&per_page=40&order=publish_date+desc%2Ccreated_at+desc&search=&category=19%2C165%2C184%2C204&blank_scope=Latest\"\ninfo = \"News\"\n# Drops collection if available to remove duplicates\ndb.news.drop()\n#Insert info\n#print(scrape(info,url))\ndb.news.insert_many(scrape(info,url))\n\n#Images\nurl = \"https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars\"\ninfo = \"Images\"\n# Drops collection if available to remove duplicates\ndb.images.drop()\n#Insert info\n#print(scrape(info,url))\ndb.images.insert_many(scrape(info,url))\n\n#Weather\nurl = \"https://twitter.com/marswxreport?lang=en\"\ninfo = \"Weather\"\n# Drops collection if available to remove duplicates\ndb.weather.drop()\n#Insert info\n#print(scrape(info,url))\ndb.weather.insert_many(scrape(info,url))\n\n#Facts\nurl = \"https://space-facts.com/mars/\"\ninfo = \"Facts\"\n# Drops collection if available to remove duplicates\ndb.facts.drop()\n#Insert info\n#print(scrape(info,url))\ndf = pd.DataFrame(scrape(info,url))\ndf_json = df.to_json()\ndf_json_list = json.loads(df_json).values()\ndb.facts.insert(df_json_list)\n\n#Hemispheres\nurl = \"https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars\"\ninfo = \"Hemis\"\n# Drops collection if available to remove duplicates\ndb.hemis.drop()\n#Insert info\n#print(scrape(info,url))\ndb.hemis.insert_many(scrape(info,url))",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def testeSelect(db):
cur1 = db.cursor()
cur1.execute('SELECT VERSION()')
data = cur1.fetchone()
print(dir(data))
print('cur1 : %s ' % cur1)
print('Database version : %s ' % data)
def dropTable(db):
cur1 = db.cursor()
cur1.execute('drop table if exists python_demo')
print('dropTable', cur1)
def createTable(db):
cur1 = db.cursor()
sql = """
CREATE TABLE IF NOT EXISTS python_demo (
MEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',
MEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',
MEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',
MEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',
COMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',
REG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',
REG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',
ENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',
REGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',
REG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',
JUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',
BUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',
COM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',
COM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',
PERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',
ZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',
CON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',
CON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',
CON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',
CON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',
CON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',
CON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',
CON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',
CON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',
CON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',
THERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',
BIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',
BIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',
BIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',
TAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',
TAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',
TAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',
ORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',
ORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',
ORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',
BANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',
BANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',
BANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',
BANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',
BANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',
BANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',
INVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',
INVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',
INVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',
APPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',
BUYER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '买家标识',
SELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',
THIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',
MAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',
MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',
ERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',
REG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',
STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',
AUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',
AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',
AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',
AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',
MDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',
MDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',
MDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',
MDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',
MEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',
CHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',
ALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',
LANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',
CREATE_USER INT(20) NOT NULL COMMENT '创建者',
CREATE_DATE DATETIME NOT NULL COMMENT '创建时间',
UPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',
UPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',
DELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',
DELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',
BUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',
AUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',
AUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',
SELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',
SELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',
SELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',
IS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',
INVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',
INVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',
PRIMARY KEY (MEMBER_ID)
)
COMMENT='会员信息表'
COLLATE='utf8_general_ci'
ENGINE=InnoDB
"""
cur1.execute(sql)
    print('createTable', cur1)
<|reserved_special_token_0|>
def insertTable(db):
cur1 = db.cursor()
cur1.execute(
"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)"
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def testeSelect(db):
cur1 = db.cursor()
cur1.execute('SELECT VERSION()')
data = cur1.fetchone()
print(dir(data))
print('cur1 : %s ' % cur1)
print('Database version : %s ' % data)
def dropTable(db):
cur1 = db.cursor()
cur1.execute('drop table if exists python_demo')
print('dropTable', cur1)
def createTable(db):
cur1 = db.cursor()
sql = """
CREATE TABLE IF NOT EXISTS python_demo (
MEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',
MEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',
MEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',
MEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',
COMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',
REG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',
REG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',
ENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',
REGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',
REG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',
JUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',
BUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',
COM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',
COM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',
PERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',
ZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',
CON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',
CON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',
CON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',
CON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',
CON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',
CON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',
CON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',
CON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',
CON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',
THERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',
BIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',
BIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',
BIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',
TAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',
TAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',
TAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',
ORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',
ORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',
ORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',
BANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',
BANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',
BANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',
BANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',
BANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',
BANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',
INVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',
INVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',
INVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',
APPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',
BUYER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '买家标识',
SELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',
THIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',
MAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',
MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',
ERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',
REG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',
STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',
AUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',
AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',
AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',
AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',
MDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',
MDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',
MDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',
MDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',
MEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',
CHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',
ALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',
LANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',
CREATE_USER INT(20) NOT NULL COMMENT '创建者',
CREATE_DATE DATETIME NOT NULL COMMENT '创建时间',
UPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',
UPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',
DELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',
DELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',
BUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',
AUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',
AUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',
SELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',
SELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',
SELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',
IS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',
INVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',
INVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',
PRIMARY KEY (MEMBER_ID)
)
COMMENT='会员信息表'
COLLATE='utf8_general_ci'
ENGINE=InnoDB
"""
cur1.execute(sql)
    print('createTable', cur1)
def selectTable(db):
cur1 = db.cursor()
cur1.execute(
'select member_name,MEMBER_CODE,member_id from python_demo limit 10')
data = cur1.fetchall()
for index, item in enumerate(data):
print(index, sep=' ', end=' ')
for index2, item2 in enumerate(item):
print(item2, sep=' ', end=' ')
print('')
def insertTable(db):
cur1 = db.cursor()
cur1.execute(
"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)"
)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def testeSelect(db):
cur1 = db.cursor()
cur1.execute('SELECT VERSION()')
data = cur1.fetchone()
print(dir(data))
print('cur1 : %s ' % cur1)
print('Database version : %s ' % data)
def dropTable(db):
cur1 = db.cursor()
cur1.execute('drop table if exists python_demo')
print('dropTable', cur1)
def createTable(db):
cur1 = db.cursor()
sql = """
CREATE TABLE IF NOT EXISTS python_demo (
MEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',
MEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',
MEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',
MEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',
COMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',
REG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',
REG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',
ENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',
REGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',
REG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',
JUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',
BUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',
COM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',
COM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',
PERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',
ZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',
CON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',
CON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',
CON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',
CON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',
CON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',
CON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',
CON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',
CON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',
CON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',
THERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',
BIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',
BIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',
BIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',
TAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',
TAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',
TAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',
ORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',
ORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',
ORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',
BANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',
BANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',
BANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',
BANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',
BANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',
BANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',
INVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',
INVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',
INVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',
APPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',
BUYER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '买家标识',
SELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',
THIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',
MAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',
MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',
ERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',
REG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',
STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',
AUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',
AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',
AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',
AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',
MDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',
MDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',
MDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',
MDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',
MEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',
CHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',
ALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',
LANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',
CREATE_USER INT(20) NOT NULL COMMENT '创建者',
CREATE_DATE DATETIME NOT NULL COMMENT '创建时间',
UPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',
UPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',
DELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',
DELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',
BUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',
AUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',
AUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',
SELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',
SELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',
SELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',
IS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',
INVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',
INVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',
PRIMARY KEY (MEMBER_ID)
)
COMMENT='会员信息表'
COLLATE='utf8_general_ci'
ENGINE=InnoDB
"""
cur1.execute(sql)
    print('createTable', cur1)
def selectTable(db):
cur1 = db.cursor()
cur1.execute(
'select member_name,MEMBER_CODE,member_id from python_demo limit 10')
data = cur1.fetchall()
for index, item in enumerate(data):
print(index, sep=' ', end=' ')
for index2, item2 in enumerate(item):
print(item2, sep=' ', end=' ')
print('')
def insertTable(db):
cur1 = db.cursor()
cur1.execute(
"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)"
)
<|reserved_special_token_0|>
dropTable(db)
createTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
testeSelect(db)
selectTable(db)
db.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def testeSelect(db):
cur1 = db.cursor()
cur1.execute('SELECT VERSION()')
data = cur1.fetchone()
print(dir(data))
print('cur1 : %s ' % cur1)
print('Database version : %s ' % data)
def dropTable(db):
cur1 = db.cursor()
cur1.execute('drop table if exists python_demo')
print('dropTable', cur1)
def createTable(db):
cur1 = db.cursor()
sql = """
CREATE TABLE IF NOT EXISTS python_demo (
MEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',
MEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',
MEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',
MEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',
COMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',
REG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',
REG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',
ENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',
REGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',
REG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',
JUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',
BUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',
COM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',
COM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',
PERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',
ZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',
CON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',
CON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',
CON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',
CON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',
CON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',
CON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',
CON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',
CON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',
CON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',
THERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',
BIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',
BIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',
BIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',
TAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',
TAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',
TAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',
ORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',
ORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',
ORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',
BANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',
BANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',
BANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',
BANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',
BANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',
BANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',
INVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',
INVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',
INVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',
APPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',
BUYER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '买家标识',
SELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',
THIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',
MAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',
MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',
ERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',
REG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',
STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',
AUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',
AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',
AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',
AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',
MDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',
MDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',
MDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',
MDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',
MEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',
CHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',
ALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',
LANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',
CREATE_USER INT(20) NOT NULL COMMENT '创建者',
CREATE_DATE DATETIME NOT NULL COMMENT '创建时间',
UPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',
UPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',
DELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',
DELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',
BUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',
AUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',
AUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',
SELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',
SELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',
SELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',
IS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',
INVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',
INVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',
PRIMARY KEY (MEMBER_ID)
)
COMMENT='会员信息表'
COLLATE='utf8_general_ci'
ENGINE=InnoDB
"""
cur1.execute(sql)
    print('createTable', cur1)
def selectTable(db):
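    # Print up to 10 rows as: index  member_name  member_code  member_id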
cur1 = db.cursor()
cur1.execute(
'select member_name,MEMBER_CODE,member_id from python_demo limit 10')
data = cur1.fetchall()
for index, item in enumerate(data):
print(index, sep=' ', end=' ')
for index2, item2 in enumerate(item):
print(item2, sep=' ', end=' ')
print('')
def insertTable(db):
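    # Insert one hard-coded sample member row (all values are embedded in the SQL literal below)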
cur1 = db.cursor()
cur1.execute(
"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)"
)
db = pymysql.connect(host='127.0.0.1', user='pu3147', password='1qaz@WSX',
    database='demo', port=3306, charset='utf8')
dropTable(db)
createTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
db.commit()
testeSelect(db)
selectTable(db)
db.close()
<|reserved_special_token_1|>
import pymysql
def testeSelect(db):
	# Create a query cursor
cur1 = db.cursor()
	# Run the SQL query with the execute() method
cur1.execute("SELECT VERSION()")
	# Fetch a single row with fetchone()
data = cur1.fetchone()
print(dir(data))
print ("cur1 : %s " % cur1)
print ("Database version : %s " % data)
def dropTable(db):
	# Create a query cursor
cur1 = db.cursor()
cur1.execute("drop table if exists python_demo")
print('dropTable',cur1)
def createTable(db):
	# Create a query cursor
cur1 = db.cursor()
sql = '''
CREATE TABLE IF NOT EXISTS python_demo (
MEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',
MEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',
MEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',
MEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',
COMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',
REG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',
REG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',
ENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',
REGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',
REG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',
JUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',
BUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',
COM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',
COM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',
PERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',
ZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',
CON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',
CON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',
CON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',
CON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',
CON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',
CON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',
CON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',
CON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',
CON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',
THERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',
BIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',
BIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',
BIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',
TAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',
TAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',
TAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',
ORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',
ORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',
ORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',
BANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',
BANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',
BANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',
BANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',
BANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',
BANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',
INVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',
INVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',
INVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',
APPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',
BUYER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '买家标识',
SELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',
THIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',
MAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',
MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',
ERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',
REG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',
STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',
AUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',
AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',
AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',
AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',
MDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',
MDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',
MDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',
MDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',
MEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',
CHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',
ALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',
LANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',
CREATE_USER INT(20) NOT NULL COMMENT '创建者',
CREATE_DATE DATETIME NOT NULL COMMENT '创建时间',
UPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',
UPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',
DELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',
DELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',
BUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',
AUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',
AUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',
SELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',
SELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',
SELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',
IS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',
INVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',
INVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',
PRIMARY KEY (MEMBER_ID)
)
COMMENT='会员信息表'
COLLATE='utf8_general_ci'
ENGINE=InnoDB
'''
cur1.execute(sql)
	print('createTable',cur1)
def selectTable(db):
	# Create a query cursor
cur1 = db.cursor()
cur1.execute("select member_name,MEMBER_CODE,member_id from python_demo limit 10")
	# Receive all result rows with fetchall()
data = cur1.fetchall()
for index,item in enumerate(data):
print(index,sep=' ', end=' ')
for index2,item2 in enumerate(item):
print(item2,sep=' ', end=' ')
print("")
def insertTable(db):
	# Create a query cursor
cur1 = db.cursor()
cur1.execute("INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)")
	# cur1.rowcount would report the number of affected rows (checks below left disabled)
#data = cur1.rowcount()
#print('insertTable',data)
# Open the database connection
db = pymysql.connect(host='127.0.0.1',user='pu3147',
	password='1qaz@WSX',database='demo',port=3306,charset='utf8')
dropTable(db)
createTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
insertTable(db)
# Commit so the inserted rows persist after the connection closes
db.commit()
testeSelect(db)
selectTable(db)
# Close the database connection
db.close()
|
flexible
|
{
"blob_id": "75133dd924f8f3f028075c5d2109bb79ddc7fe87",
"index": 434,
"step-1": "<mask token>\n\n\ndef testeSelect(db):\n cur1 = db.cursor()\n cur1.execute('SELECT VERSION()')\n data = cur1.fetchone()\n print(dir(data))\n print('cur1 : %s ' % cur1)\n print('Database version : %s ' % data)\n\n\ndef dropTable(db):\n cur1 = db.cursor()\n cur1.execute('drop table if exists python_demo')\n print('dropTable', cur1)\n\n\ndef createTable(db):\n cur1 = db.cursor()\n sql = \"\"\"\n\t\t\tCREATE TABLE IF NOT EXISTS python_demo (\n\t\t\t\tMEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',\n\t\t\t\tMEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',\n\t\t\t\tMEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',\n\t\t\t\tMEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',\n\t\t\t\tCOMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',\n\t\t\t\tREG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tREG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',\n\t\t\t\tREGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',\n\t\t\t\tREG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',\n\t\t\t\tJUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',\n\t\t\t\tBUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',\n\t\t\t\tCOM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',\n\t\t\t\tCOM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',\n\t\t\t\tPERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',\n\t\t\t\tZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',\n\t\t\t\tCON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',\n\t\t\t\tCON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',\n\t\t\t\tCON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',\n\t\t\t\tCON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',\n\t\t\t\tCON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',\n\t\t\t\tCON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',\n\t\t\t\tCON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',\n\t\t\t\tCON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',\n\t\t\t\tCON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',\n\t\t\t\tTHERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',\n\t\t\t\tBIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',\n\t\t\t\tBIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',\n\t\t\t\tBIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',\n\t\t\t\tTAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',\n\t\t\t\tTAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',\n\t\t\t\tTAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',\n\t\t\t\tORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',\n\t\t\t\tORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',\n\t\t\t\tORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',\n\t\t\t\tBANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',\n\t\t\t\tBANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',\n\t\t\t\tBANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',\n\t\t\t\tBANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',\n\t\t\t\tBANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',\n\t\t\t\tBANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',\n\t\t\t\tINVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',\n\t\t\t\tINVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',\n\t\t\t\tINVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',\n\t\t\t\tAPPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',\n\t\t\t\tBUYER_FLAG VARCHAR(20) NULL 
DEFAULT NULL COMMENT '买家标识',\n\t\t\t\tSELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',\n\t\t\t\tTHIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',\n\t\t\t\tMAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',\n\t\t\t\tMDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',\n\t\t\t\tERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',\n\t\t\t\tREG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',\n\t\t\t\tSTATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',\n\t\t\t\tAUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',\n\t\t\t\tAUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',\n\t\t\t\tAUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',\n\t\t\t\tAUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',\n\t\t\t\tMDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',\n\t\t\t\tMDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',\n\t\t\t\tMDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',\n\t\t\t\tMDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',\n\t\t\t\tMEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',\n\t\t\t\tCHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',\n\t\t\t\tALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',\n\t\t\t\tLANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',\n\t\t\t\tCREATE_USER INT(20) NOT NULL COMMENT '创建者',\n\t\t\t\tCREATE_DATE DATETIME NOT NULL COMMENT '创建时间',\n\t\t\t\tUPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',\n\t\t\t\tUPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',\n\t\t\t\tDELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',\n\t\t\t\tDELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',\n\t\t\t\tBUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',\n\t\t\t\tAUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',\n\t\t\t\tAUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',\n\t\t\t\tSELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',\n\t\t\t\tSELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',\n\t\t\t\tSELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',\n\t\t\t\tIS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',\n\t\t\t\tINVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',\n\t\t\t\tINVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',\n\t\t\t\tPRIMARY KEY (MEMBER_ID)\n\t\t\t)\n\t\t\tCOMMENT='会员信息表'\n\t\t\tCOLLATE='utf8_general_ci'\n\t\t\tENGINE=InnoDB\n\t\"\"\"\n cur1.execute(sql)\n print('createTabl', cur1)\n\n\n<mask token>\n\n\ndef insertTable(db):\n cur1 = db.cursor()\n cur1.execute(\n \"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, 
ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)\"\n )\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef testeSelect(db):\n cur1 = db.cursor()\n cur1.execute('SELECT VERSION()')\n data = cur1.fetchone()\n print(dir(data))\n print('cur1 : %s ' % cur1)\n print('Database version : %s ' % data)\n\n\ndef dropTable(db):\n cur1 = db.cursor()\n cur1.execute('drop table if exists python_demo')\n print('dropTable', cur1)\n\n\ndef createTable(db):\n cur1 = db.cursor()\n sql = \"\"\"\n\t\t\tCREATE TABLE IF NOT EXISTS python_demo (\n\t\t\t\tMEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',\n\t\t\t\tMEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',\n\t\t\t\tMEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',\n\t\t\t\tMEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',\n\t\t\t\tCOMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',\n\t\t\t\tREG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tREG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',\n\t\t\t\tREGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',\n\t\t\t\tREG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',\n\t\t\t\tJUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',\n\t\t\t\tBUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',\n\t\t\t\tCOM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',\n\t\t\t\tCOM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',\n\t\t\t\tPERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',\n\t\t\t\tZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',\n\t\t\t\tCON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',\n\t\t\t\tCON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',\n\t\t\t\tCON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',\n\t\t\t\tCON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',\n\t\t\t\tCON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',\n\t\t\t\tCON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',\n\t\t\t\tCON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',\n\t\t\t\tCON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',\n\t\t\t\tCON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',\n\t\t\t\tTHERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',\n\t\t\t\tBIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',\n\t\t\t\tBIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',\n\t\t\t\tBIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',\n\t\t\t\tTAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',\n\t\t\t\tTAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',\n\t\t\t\tTAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',\n\t\t\t\tORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',\n\t\t\t\tORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',\n\t\t\t\tORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',\n\t\t\t\tBANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',\n\t\t\t\tBANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',\n\t\t\t\tBANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',\n\t\t\t\tBANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',\n\t\t\t\tBANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',\n\t\t\t\tBANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',\n\t\t\t\tINVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',\n\t\t\t\tINVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',\n\t\t\t\tINVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',\n\t\t\t\tAPPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',\n\t\t\t\tBUYER_FLAG VARCHAR(20) NULL 
DEFAULT NULL COMMENT '买家标识',\n\t\t\t\tSELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',\n\t\t\t\tTHIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',\n\t\t\t\tMAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',\n\t\t\t\tMDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',\n\t\t\t\tERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',\n\t\t\t\tREG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',\n\t\t\t\tSTATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',\n\t\t\t\tAUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',\n\t\t\t\tAUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',\n\t\t\t\tAUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',\n\t\t\t\tAUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',\n\t\t\t\tMDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',\n\t\t\t\tMDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',\n\t\t\t\tMDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',\n\t\t\t\tMDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',\n\t\t\t\tMEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',\n\t\t\t\tCHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',\n\t\t\t\tALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',\n\t\t\t\tLANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',\n\t\t\t\tCREATE_USER INT(20) NOT NULL COMMENT '创建者',\n\t\t\t\tCREATE_DATE DATETIME NOT NULL COMMENT '创建时间',\n\t\t\t\tUPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',\n\t\t\t\tUPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',\n\t\t\t\tDELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',\n\t\t\t\tDELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',\n\t\t\t\tBUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',\n\t\t\t\tAUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',\n\t\t\t\tAUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',\n\t\t\t\tSELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',\n\t\t\t\tSELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',\n\t\t\t\tSELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',\n\t\t\t\tIS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',\n\t\t\t\tINVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',\n\t\t\t\tINVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',\n\t\t\t\tPRIMARY KEY (MEMBER_ID)\n\t\t\t)\n\t\t\tCOMMENT='会员信息表'\n\t\t\tCOLLATE='utf8_general_ci'\n\t\t\tENGINE=InnoDB\n\t\"\"\"\n cur1.execute(sql)\n print('createTabl', cur1)\n\n\ndef selectTable(db):\n cur1 = db.cursor()\n cur1.execute(\n 'select member_name,MEMBER_CODE,member_id from python_demo limit 10')\n data = cur1.fetchall()\n for index, item in enumerate(data):\n print(index, sep=' ', end=' ')\n for index2, item2 in enumerate(item):\n print(item2, sep=' ', end=' ')\n print('')\n\n\ndef insertTable(db):\n cur1 = db.cursor()\n cur1.execute(\n \"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, 
INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)\"\n )\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef testeSelect(db):\n cur1 = db.cursor()\n cur1.execute('SELECT VERSION()')\n data = cur1.fetchone()\n print(dir(data))\n print('cur1 : %s ' % cur1)\n print('Database version : %s ' % data)\n\n\ndef dropTable(db):\n cur1 = db.cursor()\n cur1.execute('drop table if exists python_demo')\n print('dropTable', cur1)\n\n\ndef createTable(db):\n cur1 = db.cursor()\n sql = \"\"\"\n\t\t\tCREATE TABLE IF NOT EXISTS python_demo (\n\t\t\t\tMEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',\n\t\t\t\tMEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',\n\t\t\t\tMEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',\n\t\t\t\tMEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',\n\t\t\t\tCOMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',\n\t\t\t\tREG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tREG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',\n\t\t\t\tREGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',\n\t\t\t\tREG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',\n\t\t\t\tJUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',\n\t\t\t\tBUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',\n\t\t\t\tCOM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',\n\t\t\t\tCOM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',\n\t\t\t\tPERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',\n\t\t\t\tZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',\n\t\t\t\tCON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',\n\t\t\t\tCON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',\n\t\t\t\tCON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',\n\t\t\t\tCON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',\n\t\t\t\tCON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',\n\t\t\t\tCON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',\n\t\t\t\tCON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',\n\t\t\t\tCON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',\n\t\t\t\tCON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',\n\t\t\t\tTHERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',\n\t\t\t\tBIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',\n\t\t\t\tBIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',\n\t\t\t\tBIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',\n\t\t\t\tTAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',\n\t\t\t\tTAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',\n\t\t\t\tTAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',\n\t\t\t\tORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',\n\t\t\t\tORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',\n\t\t\t\tORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',\n\t\t\t\tBANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',\n\t\t\t\tBANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',\n\t\t\t\tBANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',\n\t\t\t\tBANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',\n\t\t\t\tBANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',\n\t\t\t\tBANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',\n\t\t\t\tINVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',\n\t\t\t\tINVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',\n\t\t\t\tINVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',\n\t\t\t\tAPPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',\n\t\t\t\tBUYER_FLAG VARCHAR(20) NULL 
DEFAULT NULL COMMENT '买家标识',\n\t\t\t\tSELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',\n\t\t\t\tTHIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',\n\t\t\t\tMAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',\n\t\t\t\tMDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',\n\t\t\t\tERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',\n\t\t\t\tREG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',\n\t\t\t\tSTATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',\n\t\t\t\tAUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',\n\t\t\t\tAUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',\n\t\t\t\tAUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',\n\t\t\t\tAUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',\n\t\t\t\tMDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',\n\t\t\t\tMDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',\n\t\t\t\tMDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',\n\t\t\t\tMDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',\n\t\t\t\tMEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',\n\t\t\t\tCHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',\n\t\t\t\tALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',\n\t\t\t\tLANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',\n\t\t\t\tCREATE_USER INT(20) NOT NULL COMMENT '创建者',\n\t\t\t\tCREATE_DATE DATETIME NOT NULL COMMENT '创建时间',\n\t\t\t\tUPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',\n\t\t\t\tUPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',\n\t\t\t\tDELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',\n\t\t\t\tDELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',\n\t\t\t\tBUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',\n\t\t\t\tAUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',\n\t\t\t\tAUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',\n\t\t\t\tSELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',\n\t\t\t\tSELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',\n\t\t\t\tSELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',\n\t\t\t\tIS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',\n\t\t\t\tINVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',\n\t\t\t\tINVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',\n\t\t\t\tPRIMARY KEY (MEMBER_ID)\n\t\t\t)\n\t\t\tCOMMENT='会员信息表'\n\t\t\tCOLLATE='utf8_general_ci'\n\t\t\tENGINE=InnoDB\n\t\"\"\"\n cur1.execute(sql)\n print('createTabl', cur1)\n\n\ndef selectTable(db):\n cur1 = db.cursor()\n cur1.execute(\n 'select member_name,MEMBER_CODE,member_id from python_demo limit 10')\n data = cur1.fetchall()\n for index, item in enumerate(data):\n print(index, sep=' ', end=' ')\n for index2, item2 in enumerate(item):\n print(item2, sep=' ', end=' ')\n print('')\n\n\ndef insertTable(db):\n cur1 = db.cursor()\n cur1.execute(\n \"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, 
INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)\"\n )\n\n\n<mask token>\ndropTable(db)\ncreateTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ntesteSelect(db)\nselectTable(db)\ndb.close()\n",
"step-4": "<mask token>\n\n\ndef testeSelect(db):\n cur1 = db.cursor()\n cur1.execute('SELECT VERSION()')\n data = cur1.fetchone()\n print(dir(data))\n print('cur1 : %s ' % cur1)\n print('Database version : %s ' % data)\n\n\ndef dropTable(db):\n cur1 = db.cursor()\n cur1.execute('drop table if exists python_demo')\n print('dropTable', cur1)\n\n\ndef createTable(db):\n cur1 = db.cursor()\n sql = \"\"\"\n\t\t\tCREATE TABLE IF NOT EXISTS python_demo (\n\t\t\t\tMEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',\n\t\t\t\tMEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',\n\t\t\t\tMEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',\n\t\t\t\tMEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',\n\t\t\t\tCOMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',\n\t\t\t\tREG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tREG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',\n\t\t\t\tREGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',\n\t\t\t\tREG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',\n\t\t\t\tJUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',\n\t\t\t\tBUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',\n\t\t\t\tCOM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',\n\t\t\t\tCOM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',\n\t\t\t\tPERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',\n\t\t\t\tZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',\n\t\t\t\tCON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',\n\t\t\t\tCON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',\n\t\t\t\tCON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',\n\t\t\t\tCON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',\n\t\t\t\tCON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',\n\t\t\t\tCON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',\n\t\t\t\tCON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',\n\t\t\t\tCON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',\n\t\t\t\tCON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',\n\t\t\t\tTHERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',\n\t\t\t\tBIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',\n\t\t\t\tBIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',\n\t\t\t\tBIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',\n\t\t\t\tTAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',\n\t\t\t\tTAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',\n\t\t\t\tTAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',\n\t\t\t\tORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',\n\t\t\t\tORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',\n\t\t\t\tORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',\n\t\t\t\tBANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',\n\t\t\t\tBANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',\n\t\t\t\tBANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',\n\t\t\t\tBANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',\n\t\t\t\tBANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',\n\t\t\t\tBANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',\n\t\t\t\tINVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',\n\t\t\t\tINVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',\n\t\t\t\tINVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT NULL COMMENT '开票详细地址',\n\t\t\t\tAPPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',\n\t\t\t\tBUYER_FLAG VARCHAR(20) NULL 
DEFAULT NULL COMMENT '买家标识',\n\t\t\t\tSELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',\n\t\t\t\tTHIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',\n\t\t\t\tMAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',\n\t\t\t\tMDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',\n\t\t\t\tERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',\n\t\t\t\tREG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',\n\t\t\t\tSTATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',\n\t\t\t\tAUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',\n\t\t\t\tAUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',\n\t\t\t\tAUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',\n\t\t\t\tAUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',\n\t\t\t\tMDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',\n\t\t\t\tMDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',\n\t\t\t\tMDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',\n\t\t\t\tMDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',\n\t\t\t\tMEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',\n\t\t\t\tCHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',\n\t\t\t\tALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',\n\t\t\t\tLANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',\n\t\t\t\tCREATE_USER INT(20) NOT NULL COMMENT '创建者',\n\t\t\t\tCREATE_DATE DATETIME NOT NULL COMMENT '创建时间',\n\t\t\t\tUPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',\n\t\t\t\tUPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',\n\t\t\t\tDELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',\n\t\t\t\tDELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',\n\t\t\t\tBUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',\n\t\t\t\tAUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',\n\t\t\t\tAUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',\n\t\t\t\tSELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',\n\t\t\t\tSELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',\n\t\t\t\tSELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',\n\t\t\t\tIS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',\n\t\t\t\tINVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',\n\t\t\t\tINVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',\n\t\t\t\tPRIMARY KEY (MEMBER_ID)\n\t\t\t)\n\t\t\tCOMMENT='会员信息表'\n\t\t\tCOLLATE='utf8_general_ci'\n\t\t\tENGINE=InnoDB\n\t\"\"\"\n cur1.execute(sql)\n print('createTabl', cur1)\n\n\ndef selectTable(db):\n cur1 = db.cursor()\n cur1.execute(\n 'select member_name,MEMBER_CODE,member_id from python_demo limit 10')\n data = cur1.fetchall()\n for index, item in enumerate(data):\n print(index, sep=' ', end=' ')\n for index2, item2 in enumerate(item):\n print(item2, sep=' ', end=' ')\n print('')\n\n\ndef insertTable(db):\n cur1 = db.cursor()\n cur1.execute(\n \"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, 
INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)\"\n )\n\n\ndb = pymysql.connect(host='127.0.0.1', user='pu3147', password='1qaz@WSX',\n database='demo', port=3306, charset='UTF8')\ndropTable(db)\ncreateTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ntesteSelect(db)\nselectTable(db)\ndb.close()\n",
"step-5": "import pymysql\n\n\n\ndef testeSelect(db):\n\t#创建查询游标\n\tcur1 = db.cursor()\n\n\t# 使用 execute() 方法执行 SQL 查询 \n\tcur1.execute(\"SELECT VERSION()\")\n\t \n\t# 使用 fetchone() 方法获取单条数据.\n\tdata = cur1.fetchone()\n\tprint(dir(data))\n\tprint (\"cur1 : %s \" % cur1) \n\tprint (\"Database version : %s \" % data)\n\ndef dropTable(db):\n\t#创建查询游标\n\tcur1 = db.cursor()\n\n\tcur1.execute(\"drop table if exists python_demo\")\n\tprint('dropTable',cur1)\n\t \n\ndef createTable(db):\n\t#创建查询游标\n\tcur1 = db.cursor()\n\n\tsql = '''\n\t\t\tCREATE TABLE IF NOT EXISTS python_demo (\n\t\t\t\tMEMBER_ID INT(20) NOT NULL AUTO_INCREMENT COMMENT '会员ID',\n\t\t\t\tMEMBER_CODE VARCHAR(20) NOT NULL COMMENT '会员代码',\n\t\t\t\tMEMBER_NAME VARCHAR(128) NOT NULL COMMENT '公司中文名称',\n\t\t\t\tMEMBER_NAME_SHORT VARCHAR(128) NULL DEFAULT NULL COMMENT '公司简称',\n\t\t\t\tCOMPANY_NAME_EN VARCHAR(128) NULL DEFAULT NULL COMMENT '公司英文名称',\n\t\t\t\tREG_PLACE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tREG_ADDRESS VARCHAR(128) NULL DEFAULT NULL COMMENT '公司注册地址',\n\t\t\t\tENT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '公司性质',\n\t\t\t\tREGCAPITAL DOUBLE(12,2) NULL DEFAULT NULL COMMENT '注册资本',\n\t\t\t\tREG_CURRENCY VARCHAR(20) NULL DEFAULT NULL COMMENT '注册资本币种',\n\t\t\t\tJUDICIAL_PERSON VARCHAR(32) NULL DEFAULT NULL COMMENT '法人名称',\n\t\t\t\tBUSINESS_SCOPE VARCHAR(128) NULL DEFAULT NULL COMMENT '公司经营范围',\n\t\t\t\tCOM_TEL VARCHAR(20) NULL DEFAULT NULL COMMENT '公司电话',\n\t\t\t\tCOM_FAX VARCHAR(64) NULL DEFAULT NULL COMMENT '公司传真',\n\t\t\t\tPERSON_INCHARGE VARCHAR(64) NULL DEFAULT NULL COMMENT '公司负责人',\n\t\t\t\tZIP_CODE VARCHAR(6) NULL DEFAULT NULL COMMENT '邮编',\n\t\t\t\tCON_NAME VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人姓名',\n\t\t\t\tCON_MOBILE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人手机',\n\t\t\t\tCON_EMAIL VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人邮箱',\n\t\t\t\tCON_FAX VARCHAR(32) NULL DEFAULT NULL COMMENT '联系人传真',\n\t\t\t\tCON_CERT_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件类型',\n\t\t\t\tCON_CERT_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '联系人证件号',\n\t\t\t\tCON_CERT_DATE DATE NULL DEFAULT NULL COMMENT '联系人证件失效时间',\n\t\t\t\tCON_CER1_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证正面ID',\n\t\t\t\tCON_CER2_ID INT(20) NULL DEFAULT NULL COMMENT '联系人身份证反面ID',\n\t\t\t\tTHERECER_INTGRATED VARCHAR(20) NULL DEFAULT NULL COMMENT '三证合一标志',\n\t\t\t\tBIZLIC_ID INT(20) NULL DEFAULT NULL COMMENT '营业执照ID',\n\t\t\t\tBIZLIC_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '营业执照代码',\n\t\t\t\tBIZLIC_DATE DATE NULL DEFAULT NULL COMMENT '营业执照失效时间',\n\t\t\t\tTAXREGISTER_ID INT(20) NULL DEFAULT NULL COMMENT '税务等级证书ID',\n\t\t\t\tTAXREGISTER_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '税务登记号',\n\t\t\t\tTAXREGISTER_DATE DATE NULL DEFAULT NULL COMMENT '税务登记失效时间',\n\t\t\t\tORGREG_ID INT(20) NULL DEFAULT NULL COMMENT '组织机构代码证ID',\n\t\t\t\tORGREG_NO VARCHAR(20) NULL DEFAULT NULL COMMENT '组织机构代码',\n\t\t\t\tORGREG_DATE DATE NULL DEFAULT NULL COMMENT '组织机构失效时间',\n\t\t\t\tBANK_ID INT(20) NULL DEFAULT NULL COMMENT '银行开户许可证ID',\n\t\t\t\tBANK_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '开户银行类别',\n\t\t\t\tBANK_DEPOSIT VARCHAR(128) NULL DEFAULT NULL COMMENT '开户银行',\n\t\t\t\tBANK_DEPOSIT_CODE VARCHAR(32) NULL DEFAULT NULL COMMENT '开户银行编码',\n\t\t\t\tBANK_ACCOUNTNO VARCHAR(32) NULL DEFAULT NULL COMMENT '银行账号',\n\t\t\t\tBANK_HOUSEHOULD VARCHAR(32) NULL DEFAULT NULL COMMENT '银行户主',\n\t\t\t\tINVOICE_TITLE VARCHAR(128) NULL DEFAULT NULL COMMENT '开票台头',\n\t\t\t\tINVOICE_ADDRESS VARCHAR(20) NULL DEFAULT NULL COMMENT '开票地址',\n\t\t\t\tINVOICE_ADDRESS_DT VARCHAR(128) NULL DEFAULT 
NULL COMMENT '开票详细地址',\n\t\t\t\tAPPLY_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '申请审核机构',\n\t\t\t\tBUYER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '买家标识',\n\t\t\t\tSELLER_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家标识',\n\t\t\t\tTHIRD_PART_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '第三方标识',\n\t\t\t\tMAIN_USER_ID INT(20) NULL DEFAULT NULL COMMENT '主账号ID',\n\t\t\t\tMDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM主数据CODE',\n\t\t\t\tERP_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT 'ERP会员CODE',\n\t\t\t\tREG_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '注册类型',\n\t\t\t\tSTATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '会员状态',\n\t\t\t\tAUDITOR VARCHAR(128) NULL DEFAULT NULL COMMENT '审核人',\n\t\t\t\tAUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT '审核时间',\n\t\t\t\tAUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT '审核结果',\n\t\t\t\tAUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT '审核意见',\n\t\t\t\tMDM_AUDITOR VARCHAR(32) NULL DEFAULT NULL COMMENT 'MDM审核人',\n\t\t\t\tMDM_AUDIT_DATE DATETIME NULL DEFAULT NULL COMMENT 'MDM审核时间',\n\t\t\t\tMDM_AUDIT_OPINION VARCHAR(128) NULL DEFAULT NULL COMMENT 'MDM审核意见',\n\t\t\t\tMDM_AUDIT_RESULT VARCHAR(20) NULL DEFAULT NULL COMMENT 'MDM审核结果',\n\t\t\t\tMEMBER_CHG_ID INT(20) NULL DEFAULT NULL COMMENT '变更ID',\n\t\t\t\tCHANGE_STATUS VARCHAR(20) NULL DEFAULT NULL COMMENT '变更状态',\n\t\t\t\tALIVE_FLAG VARCHAR(1) NOT NULL COMMENT '当前有效状态',\n\t\t\t\tLANG_VER VARCHAR(10) NULL DEFAULT NULL COMMENT '语言类型',\n\t\t\t\tCREATE_USER INT(20) NOT NULL COMMENT '创建者',\n\t\t\t\tCREATE_DATE DATETIME NOT NULL COMMENT '创建时间',\n\t\t\t\tUPDATE_USER INT(20) NULL DEFAULT NULL COMMENT '修改者',\n\t\t\t\tUPDATE_DATE DATETIME NULL DEFAULT NULL COMMENT '修改时间',\n\t\t\t\tDELETE_USER INT(20) NULL DEFAULT NULL COMMENT '删除者',\n\t\t\t\tDELETE_DATE DATETIME NULL DEFAULT NULL COMMENT '删除时间',\n\t\t\t\tBUYER_TYPE VARCHAR(20) NULL DEFAULT NULL COMMENT '买家类型(01:个人买家;02:公司买家)',\n\t\t\t\tAUDIT_OPTION_FLAG VARCHAR(20) NULL DEFAULT NULL COMMENT '审核身份标识(01:平台;02:卖家)',\n\t\t\t\tAUDIT_SELLER_ID INT(20) NULL DEFAULT NULL COMMENT '审核卖家ID(当审核身份标识为卖家审核时,审核的卖家ID)',\n\t\t\t\tSELLER_MDM VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统',\n\t\t\t\tSELLER_SAP VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家SAP系统',\n\t\t\t\tSELLER_MDM_DATA_CODE VARCHAR(20) NULL DEFAULT NULL COMMENT '卖家MDM系统数据CODE',\n\t\t\t\tIS_PLAT_BLACK VARCHAR(2) NULL DEFAULT NULL COMMENT '黑名单状态(41:是;0:否)',\n\t\t\t\tINVOCIE_ADDRESS_LEFT3 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-省',\n\t\t\t\tINVOCIE_ADDRESS_RIGHT5 VARCHAR(10) NULL DEFAULT NULL COMMENT '用户所属区域-市',\n\t\t\t\tPRIMARY KEY (MEMBER_ID)\n\t\t\t)\n\t\t\tCOMMENT='会员信息表'\n\t\t\tCOLLATE='utf8_general_ci'\n\t\t\tENGINE=InnoDB\n\t'''\n\n\tcur1.execute(sql)\n\t\n\tprint('createTabl',cur1)\n\ndef selectTable(db):\n\t#创建查询游标\n\tcur1 = db.cursor()\n\n\tcur1.execute(\"select member_name,MEMBER_CODE,member_id from python_demo limit 10\")\n\n\t# 使用 fetchall() 接收全部的返回结果行\n\tdata = cur1.fetchall()\n\tfor index,item in enumerate(data):\n\t\tprint(index,sep=' ', end=' ')\n\t\tfor index2,item2 in enumerate(item):\n\t\t\tprint(item2,sep=' ', end=' ')\n\t\tprint(\"\")\n\ndef insertTable(db):\n\t#创建查询游标\n\tcur1 = db.cursor()\n\n\tcur1.execute(\"INSERT INTO python_demo (MEMBER_CODE, MEMBER_NAME, MEMBER_NAME_SHORT, COMPANY_NAME_EN, REG_PLACE, REG_ADDRESS, ENT_TYPE, REGCAPITAL, REG_CURRENCY, JUDICIAL_PERSON, BUSINESS_SCOPE, COM_TEL, COM_FAX, PERSON_INCHARGE, ZIP_CODE, CON_NAME, CON_MOBILE, CON_EMAIL, CON_FAX, CON_CERT_TYPE, CON_CERT_NO, CON_CERT_DATE, CON_CER1_ID, CON_CER2_ID, THERECER_INTGRATED, BIZLIC_ID, BIZLIC_NO, 
BIZLIC_DATE, TAXREGISTER_ID, TAXREGISTER_NO, TAXREGISTER_DATE, ORGREG_ID, ORGREG_NO, ORGREG_DATE, BANK_ID, BANK_TYPE, BANK_DEPOSIT, BANK_DEPOSIT_CODE, BANK_ACCOUNTNO, BANK_HOUSEHOULD, INVOICE_TITLE, INVOICE_ADDRESS, INVOICE_ADDRESS_DT, APPLY_SELLER_ID, BUYER_FLAG, SELLER_FLAG, THIRD_PART_FLAG, MAIN_USER_ID, MDM_DATA_CODE, ERP_DATA_CODE, REG_TYPE, STATUS, AUDITOR, AUDIT_DATE, AUDIT_RESULT, AUDIT_OPINION, MDM_AUDITOR, MDM_AUDIT_DATE, MDM_AUDIT_OPINION, MDM_AUDIT_RESULT, MEMBER_CHG_ID, CHANGE_STATUS, ALIVE_FLAG, LANG_VER, CREATE_USER, CREATE_DATE, UPDATE_USER, UPDATE_DATE, DELETE_USER, DELETE_DATE, BUYER_TYPE, AUDIT_OPTION_FLAG, AUDIT_SELLER_ID, SELLER_MDM, SELLER_SAP, SELLER_MDM_DATA_CODE, IS_PLAT_BLACK, INVOCIE_ADDRESS_LEFT3, INVOCIE_ADDRESS_RIGHT5) VALUES ('A000001', '中国有限公司', '中国有限公司', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'xinming', '15136378930', NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, '0', '1', '0', 2, 'M0000001', '00M0000001', '10', '01', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 2, '01', '1', 'ZH-CN', 179143, '2016-05-28 12:16:23', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)\")\n\n\t# 使用 fetchall() 接收全部的返回结果行\n\t#data = cur1.rowcount()\n\n\t#print('insertTable',data)\n\n# 打开数据库连接\ndb = pymysql.connect(host='127.0.0.1',user='pu3147',\n\tpassword='1qaz@WSX',database='demo',port=3306,charset='UTF8')\n\n\ndropTable(db)\ncreateTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ninsertTable(db)\ntesteSelect(db)\nselectTable(db)\n\n# 关闭数据库连接\ndb.close()\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
<|reserved_special_token_0|>
class QQ_Login_Page(BasePage):
def login(self):
local.pyapp.click('android=>new UiSelector().text("登 录")')
def username(self):
local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)
def passwd(self):
local.pyapp.type('content=>密码 安全', 'besttest123')
def left_close(self):
css = 'android=>new UiSelector().text("关闭")'
local.pyapp.click(css)
def login_check(self, name):
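        # Assumed behaviour of the Pyapp wrapper: wait for the login button and save an error screenshot under the given name if it never appears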
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("登 录")', name)
class SetLock(QQ_Login_Page):
def photo(self):
local.pyapp.click('content=>帐户及设置')
def set_up(self):
local.pyapp.click('content=>设置')
def set_up_of_account(self):
local.pyapp.click('android=>new UiSelector().text("帐号、设备安全")')
def set_gesture_passwd(self):
local.pyapp.click('content=>手势密码锁定')
def create_gesture(self):
local.pyapp.click('android=>new UiSelector().text("创建手势密码")')
def set_gesture(self):
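        # Assumption: the android.view.View at index 12 is the 3x3 gesture-pattern grid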
element = local.pyapp.get_elements('class=>android.view.View')[12]
location = element.location
x = location['x']
y = location['y']
size = element.size
width = size['width']
height = size['height']
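        # Half of one grid cell; odd multiples of these offsets land on the centres of the nine cells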
sample_width = width / 3 / 2
sample_height = height / 3 / 2
onex = x + sample_width
oney = y + sample_height
twox = x + sample_width * 3
twoy = y + sample_height
threex = x + sample_width * 5
        threey = y + sample_height
        fourx = x + sample_width * 3
        foury = y + sample_height * 3
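        # Drag through four cell centres: top-left -> top-middle -> top-right -> centre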
TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x
=twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,
y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -
threey).perform()
def set_lock_check(self, name):
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("修改手势密码")', name)
class Page(SetLock):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BasePage(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class QQ_Login_Page(BasePage):
def login(self):
local.pyapp.click('android=>new UiSelector().text("登 录")')
def username(self):
local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)
def passwd(self):
local.pyapp.type('content=>密码 安全', 'besttest123')
def left_close(self):
css = 'android=>new UiSelector().text("关闭")'
local.pyapp.click(css)
def login_check(self, name):
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("登 录")', name)
class SetLock(QQ_Login_Page):
def photo(self):
local.pyapp.click('content=>帐户及设置')
def set_up(self):
local.pyapp.click('content=>设置')
def set_up_of_account(self):
local.pyapp.click('android=>new UiSelector().text("帐号、设备安全")')
def set_gesture_passwd(self):
local.pyapp.click('content=>手势密码锁定')
def create_gesture(self):
local.pyapp.click('android=>new UiSelector().text("创建手势密码")')
def set_gesture(self):
element = local.pyapp.get_elements('class=>android.view.View')[12]
location = element.location
x = location['x']
y = location['y']
size = element.size
width = size['width']
height = size['height']
sample_width = width / 3 / 2
sample_height = height / 3 / 2
onex = x + sample_width
oney = y + sample_height
twox = x + sample_width * 3
twoy = y + sample_height
threex = x + sample_width * 5
threey = y + sample_width
fourx = x + sample_width * 3
foury = y + sample_width * 3
TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x
=twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,
y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -
threey).perform()
def set_lock_check(self, name):
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("修改手势密码")', name)
class Page(SetLock):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BasePage(object):
def __init__(self, driver=None):
if driver is None:
local.driver = driver_queue.get()
local.pyapp = Pyapp(local.driver)
else:
local.driver = driver
local.pyapp = Pyapp(driver)
def quit(self):
local.pyapp.quit()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class QQ_Login_Page(BasePage):
def login(self):
local.pyapp.click('android=>new UiSelector().text("登 录")')
def username(self):
local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)
def passwd(self):
local.pyapp.type('content=>密码 安全', 'besttest123')
def left_close(self):
css = 'android=>new UiSelector().text("关闭")'
local.pyapp.click(css)
def login_check(self, name):
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("登 录")', name)
class SetLock(QQ_Login_Page):
def photo(self):
local.pyapp.click('content=>帐户及设置')
def set_up(self):
local.pyapp.click('content=>设置')
def set_up_of_account(self):
local.pyapp.click('android=>new UiSelector().text("帐号、设备安全")')
def set_gesture_passwd(self):
local.pyapp.click('content=>手势密码锁定')
def create_gesture(self):
local.pyapp.click('android=>new UiSelector().text("创建手势密码")')
def set_gesture(self):
element = local.pyapp.get_elements('class=>android.view.View')[12]
location = element.location
x = location['x']
y = location['y']
size = element.size
width = size['width']
height = size['height']
sample_width = width / 3 / 2
sample_height = height / 3 / 2
onex = x + sample_width
oney = y + sample_height
twox = x + sample_width * 3
twoy = y + sample_height
threex = x + sample_width * 5
threey = y + sample_width
fourx = x + sample_width * 3
foury = y + sample_width * 3
TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x
=twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,
y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -
threey).perform()
def set_lock_check(self, name):
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("修改手势密码")', name)
class Page(SetLock):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BasePage(object):
def __init__(self, driver=None):
if driver is None:
local.driver = driver_queue.get()
local.pyapp = Pyapp(local.driver)
else:
local.driver = driver
local.pyapp = Pyapp(driver)
def quit(self):
local.pyapp.quit()
def reset_package(self):
local.pyapp.reset()
def move(self, a=1, b=2):
befor = self.source[a]
after = self.source[b]
r = None, after[1] - befor[1], after[2] - befor[2]
return r
<|reserved_special_token_0|>
class QQ_Login_Page(BasePage):
def login(self):
local.pyapp.click('android=>new UiSelector().text("登 录")')
def username(self):
local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)
def passwd(self):
local.pyapp.type('content=>密码 安全', 'besttest123')
def left_close(self):
css = 'android=>new UiSelector().text("关闭")'
local.pyapp.click(css)
def login_check(self, name):
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("登 录")', name)
class SetLock(QQ_Login_Page):
def photo(self):
local.pyapp.click('content=>帐户及设置')
def set_up(self):
local.pyapp.click('content=>设置')
def set_up_of_account(self):
local.pyapp.click('android=>new UiSelector().text("帐号、设备安全")')
def set_gesture_passwd(self):
local.pyapp.click('content=>手势密码锁定')
def create_gesture(self):
local.pyapp.click('android=>new UiSelector().text("创建手势密码")')
def set_gesture(self):
element = local.pyapp.get_elements('class=>android.view.View')[12]
location = element.location
x = location['x']
y = location['y']
size = element.size
width = size['width']
height = size['height']
sample_width = width / 3 / 2
sample_height = height / 3 / 2
onex = x + sample_width
oney = y + sample_height
twox = x + sample_width * 3
twoy = y + sample_height
threex = x + sample_width * 5
threey = y + sample_width
fourx = x + sample_width * 3
foury = y + sample_width * 3
TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x
=twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,
y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -
threey).perform()
def set_lock_check(self, name):
return local.pyapp.wait_and_save_exception(
'android=>new UiSelector().text("修改手势密码")', name)
class Page(SetLock):
pass
<|reserved_special_token_1|>
from lib.appData import driver_queue
from lib.pyapp import Pyapp
import threading
from appium.webdriver.common.touch_action import TouchAction
from lib.logger import logger
import time
local = threading.local()
class BasePage(object):
def __init__(self, driver=None):
if driver is None:
local.driver = driver_queue.get()
local.pyapp = Pyapp(local.driver)
else:
local.driver = driver
local.pyapp = Pyapp(driver)
def quit(self):
local.pyapp.quit()
def reset_package(self):
local.pyapp.reset()
def move(self, a=1, b=2):
befor = self.source[a]
after = self.source[b]
r = (None, after[1] - befor[1], after[2] - befor[2])
return r
def relieve_device_lock_qq(self, num):
element = local.pyapp.get_elements('class=>android.view.View')[num]
location = element.location
logger.debug('location: %s' % location)
size = element.size
logger.debug('size: %s' % size)
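        # split the pattern view into a 3x3 grid and store the centre point of each cell (keys 1-9)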
self.source = {1: (None, location["x"] + size["width"] / 6, location["y"] + size["height"] / 6),
2: (None, location["x"] + size["width"] / 6 * 3, location["y"] + size["height"] / 6),
3: (None, location["x"] + size["width"] / 6 * 5, location["y"] + size["height"] / 6),
4: (None, location["x"] + size["width"] / 6, location["y"] + size["height"] / 6 * 3),
5: (None, location["x"] + size["width"] / 6 * 3, location["y"] + size["height"] / 6 * 3),
6: (None, location["x"] + size["width"] / 6 * 5, location["y"] + size["height"] / 6 * 3),
7: (None, location["x"] + size["width"] / 6, location["y"] + size["height"] / 6 * 5),
8: (None, location["x"] + size["width"] / 6 * 3, location["y"] + size["height"] / 6 * 5),
9: (None, location["x"] + size["width"] / 6 * 5, location["y"] + size["height"] / 6 * 5)}
logger.debug('拆分后的9个图:%s' % self.source)
TouchAction(local.driver).press(*self.source[1]).wait(300).move_to(*self.move(1, 2)).wait(300).move_to(
*self.move(2, 3)).wait(300).move_to(*self.move(3, 5)).wait(300).move_to(*self.move(5, 7)).wait(
300).move_to(
*self.move(7, 8)).wait(300).move_to(*self.move(8, 9)).wait(300).release().perform()
class QQ_Login_Page(BasePage):
def login(self):
local.pyapp.click('android=>new UiSelector().text("登 录")')
def username(self):
local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)
def passwd(self):
local.pyapp.type('content=>密码 安全', 'besttest123')
def left_close(self):
css = 'android=>new UiSelector().text("关闭")'
local.pyapp.click(css)
def login_check(self, name):
return local.pyapp.wait_and_save_exception('android=>new UiSelector().text("登 录")', name)
class SetLock(QQ_Login_Page):
def photo(self):
local.pyapp.click('content=>帐户及设置')
def set_up(self):
local.pyapp.click('content=>设置')
def set_up_of_account(self):
local.pyapp.click('android=>new UiSelector().text("帐号、设备安全")')
def set_gesture_passwd(self):
local.pyapp.click('content=>手势密码锁定')
def create_gesture(self):
local.pyapp.click('android=>new UiSelector().text("创建手势密码")')
# def set_gesture(self):
# self.relieve_device_lock_qq(12)
# time.sleep(1)
# self.relieve_device_lock_qq(12)
    def set_gesture(self):
        # locate the pattern-lock view and compute the size of half a cell
        element = local.pyapp.get_elements('class=>android.view.View')[12]
        location = element.location
        x = location['x']
        y = location['y']
        size = element.size
        width = size['width']
        height = size['height']
        sample_width = width / 3 / 2
        sample_height = height / 3 / 2
        # four anchor points of the gesture, expressed as cell centres
        onex = x + sample_width
        oney = y + sample_height
        twox = x + sample_width * 3
        twoy = y + sample_height
        threex = x + sample_width * 5
        threey = y + sample_width
        fourx = x + sample_width * 3
        foury = y + sample_width * 3
        # press the first point, then move through the remaining ones
        TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(
            x=twox - onex, y=twoy - oney).wait(300).move_to(
            x=threex - twox, y=threey - twoy).wait(300).move_to(
            x=fourx - threex, y=foury - threey).perform()
def set_lock_check(self, name):
return local.pyapp.wait_and_save_exception('android=>new UiSelector().text("修改手势密码")', name)
class Page(SetLock):
pass
|
flexible
|
{
"blob_id": "aa51c8f736461f147704c1ec0669c265348fcb80",
"index": 6869,
"step-1": "<mask token>\n\n\nclass QQ_Login_Page(BasePage):\n\n def login(self):\n local.pyapp.click('android=>new UiSelector().text(\"登 录\")')\n\n def username(self):\n local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)\n\n def passwd(self):\n local.pyapp.type('content=>密码 安全', 'besttest123')\n\n def left_close(self):\n css = 'android=>new UiSelector().text(\"关闭\")'\n local.pyapp.click(css)\n\n def login_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"登 录\")', name)\n\n\nclass SetLock(QQ_Login_Page):\n\n def photo(self):\n local.pyapp.click('content=>帐户及设置')\n\n def set_up(self):\n local.pyapp.click('content=>设置')\n\n def set_up_of_account(self):\n local.pyapp.click('android=>new UiSelector().text(\"帐号、设备安全\")')\n\n def set_gesture_passwd(self):\n local.pyapp.click('content=>手势密码锁定')\n\n def create_gesture(self):\n local.pyapp.click('android=>new UiSelector().text(\"创建手势密码\")')\n\n def set_gesture(self):\n element = local.pyapp.get_elements('class=>android.view.View')[12]\n location = element.location\n x = location['x']\n y = location['y']\n size = element.size\n width = size['width']\n height = size['height']\n sample_width = width / 3 / 2\n sample_height = height / 3 / 2\n onex = x + sample_width\n oney = y + sample_height\n twox = x + sample_width * 3\n twoy = y + sample_height\n threex = x + sample_width * 5\n threey = y + sample_width\n fourx = x + sample_width * 3\n foury = y + sample_width * 3\n TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x\n =twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,\n y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -\n threey).perform()\n\n def set_lock_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"修改手势密码\")', name)\n\n\nclass Page(SetLock):\n pass\n",
"step-2": "<mask token>\n\n\nclass BasePage(object):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass QQ_Login_Page(BasePage):\n\n def login(self):\n local.pyapp.click('android=>new UiSelector().text(\"登 录\")')\n\n def username(self):\n local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)\n\n def passwd(self):\n local.pyapp.type('content=>密码 安全', 'besttest123')\n\n def left_close(self):\n css = 'android=>new UiSelector().text(\"关闭\")'\n local.pyapp.click(css)\n\n def login_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"登 录\")', name)\n\n\nclass SetLock(QQ_Login_Page):\n\n def photo(self):\n local.pyapp.click('content=>帐户及设置')\n\n def set_up(self):\n local.pyapp.click('content=>设置')\n\n def set_up_of_account(self):\n local.pyapp.click('android=>new UiSelector().text(\"帐号、设备安全\")')\n\n def set_gesture_passwd(self):\n local.pyapp.click('content=>手势密码锁定')\n\n def create_gesture(self):\n local.pyapp.click('android=>new UiSelector().text(\"创建手势密码\")')\n\n def set_gesture(self):\n element = local.pyapp.get_elements('class=>android.view.View')[12]\n location = element.location\n x = location['x']\n y = location['y']\n size = element.size\n width = size['width']\n height = size['height']\n sample_width = width / 3 / 2\n sample_height = height / 3 / 2\n onex = x + sample_width\n oney = y + sample_height\n twox = x + sample_width * 3\n twoy = y + sample_height\n threex = x + sample_width * 5\n threey = y + sample_width\n fourx = x + sample_width * 3\n foury = y + sample_width * 3\n TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x\n =twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,\n y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -\n threey).perform()\n\n def set_lock_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"修改手势密码\")', name)\n\n\nclass Page(SetLock):\n pass\n",
"step-3": "<mask token>\n\n\nclass BasePage(object):\n\n def __init__(self, driver=None):\n if driver is None:\n local.driver = driver_queue.get()\n local.pyapp = Pyapp(local.driver)\n else:\n local.driver = driver\n local.pyapp = Pyapp(driver)\n\n def quit(self):\n local.pyapp.quit()\n <mask token>\n <mask token>\n <mask token>\n\n\nclass QQ_Login_Page(BasePage):\n\n def login(self):\n local.pyapp.click('android=>new UiSelector().text(\"登 录\")')\n\n def username(self):\n local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)\n\n def passwd(self):\n local.pyapp.type('content=>密码 安全', 'besttest123')\n\n def left_close(self):\n css = 'android=>new UiSelector().text(\"关闭\")'\n local.pyapp.click(css)\n\n def login_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"登 录\")', name)\n\n\nclass SetLock(QQ_Login_Page):\n\n def photo(self):\n local.pyapp.click('content=>帐户及设置')\n\n def set_up(self):\n local.pyapp.click('content=>设置')\n\n def set_up_of_account(self):\n local.pyapp.click('android=>new UiSelector().text(\"帐号、设备安全\")')\n\n def set_gesture_passwd(self):\n local.pyapp.click('content=>手势密码锁定')\n\n def create_gesture(self):\n local.pyapp.click('android=>new UiSelector().text(\"创建手势密码\")')\n\n def set_gesture(self):\n element = local.pyapp.get_elements('class=>android.view.View')[12]\n location = element.location\n x = location['x']\n y = location['y']\n size = element.size\n width = size['width']\n height = size['height']\n sample_width = width / 3 / 2\n sample_height = height / 3 / 2\n onex = x + sample_width\n oney = y + sample_height\n twox = x + sample_width * 3\n twoy = y + sample_height\n threex = x + sample_width * 5\n threey = y + sample_width\n fourx = x + sample_width * 3\n foury = y + sample_width * 3\n TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x\n =twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,\n y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -\n threey).perform()\n\n def set_lock_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"修改手势密码\")', name)\n\n\nclass Page(SetLock):\n pass\n",
"step-4": "<mask token>\n\n\nclass BasePage(object):\n\n def __init__(self, driver=None):\n if driver is None:\n local.driver = driver_queue.get()\n local.pyapp = Pyapp(local.driver)\n else:\n local.driver = driver\n local.pyapp = Pyapp(driver)\n\n def quit(self):\n local.pyapp.quit()\n\n def reset_package(self):\n local.pyapp.reset()\n\n def move(self, a=1, b=2):\n befor = self.source[a]\n after = self.source[b]\n r = None, after[1] - befor[1], after[2] - befor[2]\n return r\n <mask token>\n\n\nclass QQ_Login_Page(BasePage):\n\n def login(self):\n local.pyapp.click('android=>new UiSelector().text(\"登 录\")')\n\n def username(self):\n local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)\n\n def passwd(self):\n local.pyapp.type('content=>密码 安全', 'besttest123')\n\n def left_close(self):\n css = 'android=>new UiSelector().text(\"关闭\")'\n local.pyapp.click(css)\n\n def login_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"登 录\")', name)\n\n\nclass SetLock(QQ_Login_Page):\n\n def photo(self):\n local.pyapp.click('content=>帐户及设置')\n\n def set_up(self):\n local.pyapp.click('content=>设置')\n\n def set_up_of_account(self):\n local.pyapp.click('android=>new UiSelector().text(\"帐号、设备安全\")')\n\n def set_gesture_passwd(self):\n local.pyapp.click('content=>手势密码锁定')\n\n def create_gesture(self):\n local.pyapp.click('android=>new UiSelector().text(\"创建手势密码\")')\n\n def set_gesture(self):\n element = local.pyapp.get_elements('class=>android.view.View')[12]\n location = element.location\n x = location['x']\n y = location['y']\n size = element.size\n width = size['width']\n height = size['height']\n sample_width = width / 3 / 2\n sample_height = height / 3 / 2\n onex = x + sample_width\n oney = y + sample_height\n twox = x + sample_width * 3\n twoy = y + sample_height\n threex = x + sample_width * 5\n threey = y + sample_width\n fourx = x + sample_width * 3\n foury = y + sample_width * 3\n TouchAction(local.driver).press(x=onex, y=oney).wait(300).move_to(x\n =twox - onex, y=twoy - oney).wait(300).move_to(x=threex - twox,\n y=threey - twoy).wait(300).move_to(x=fourx - threex, y=foury -\n threey).perform()\n\n def set_lock_check(self, name):\n return local.pyapp.wait_and_save_exception(\n 'android=>new UiSelector().text(\"修改手势密码\")', name)\n\n\nclass Page(SetLock):\n pass\n",
"step-5": "from lib.appData import driver_queue\nfrom lib.pyapp import Pyapp\nimport threading\nfrom appium.webdriver.common.touch_action import TouchAction\nfrom lib.logger import logger\nimport time\nlocal = threading.local()\n\n\nclass BasePage(object):\n def __init__(self, driver=None):\n if driver is None:\n local.driver = driver_queue.get()\n local.pyapp = Pyapp(local.driver)\n else:\n local.driver = driver\n local.pyapp = Pyapp(driver)\n\n def quit(self):\n local.pyapp.quit()\n\n def reset_package(self):\n local.pyapp.reset()\n\n\n def move(self, a=1, b=2):\n befor = self.source[a]\n after = self.source[b]\n r = (None, after[1] - befor[1], after[2] - befor[2])\n return r\n\n\n def relieve_device_lock_qq(self, num):\n\n element = local.pyapp.get_elements('class=>android.view.View')[num]\n location = element.location\n logger.debug('location: %s' % location)\n size = element.size\n logger.debug('size: %s' % size)\n self.source = {1: (None, location[\"x\"] + size[\"width\"] / 6, location[\"y\"] + size[\"height\"] / 6),\n 2: (None, location[\"x\"] + size[\"width\"] / 6 * 3, location[\"y\"] + size[\"height\"] / 6),\n 3: (None, location[\"x\"] + size[\"width\"] / 6 * 5, location[\"y\"] + size[\"height\"] / 6),\n 4: (None, location[\"x\"] + size[\"width\"] / 6, location[\"y\"] + size[\"height\"] / 6 * 3),\n 5: (None, location[\"x\"] + size[\"width\"] / 6 * 3, location[\"y\"] + size[\"height\"] / 6 * 3),\n 6: (None, location[\"x\"] + size[\"width\"] / 6 * 5, location[\"y\"] + size[\"height\"] / 6 * 3),\n 7: (None, location[\"x\"] + size[\"width\"] / 6, location[\"y\"] + size[\"height\"] / 6 * 5),\n 8: (None, location[\"x\"] + size[\"width\"] / 6 * 3, location[\"y\"] + size[\"height\"] / 6 * 5),\n 9: (None, location[\"x\"] + size[\"width\"] / 6 * 5, location[\"y\"] + size[\"height\"] / 6 * 5)}\n logger.debug('拆分后的9个图:%s' % self.source)\n TouchAction(local.driver).press(*self.source[1]).wait(300).move_to(*self.move(1, 2)).wait(300).move_to(\n *self.move(2, 3)).wait(300).move_to(*self.move(3, 5)).wait(300).move_to(*self.move(5, 7)).wait(\n 300).move_to(\n *self.move(7, 8)).wait(300).move_to(*self.move(8, 9)).wait(300).release().perform()\n\nclass QQ_Login_Page(BasePage):\n def login(self):\n local.pyapp.click('android=>new UiSelector().text(\"登 录\")')\n\n def username(self):\n local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)\n\n def passwd(self):\n local.pyapp.type('content=>密码 安全', 'besttest123')\n\n def left_close(self):\n css = 'android=>new UiSelector().text(\"关闭\")'\n local.pyapp.click(css)\n\n def login_check(self, name):\n return local.pyapp.wait_and_save_exception('android=>new UiSelector().text(\"登 录\")', name)\n\n\nclass SetLock(QQ_Login_Page):\n def photo(self):\n local.pyapp.click('content=>帐户及设置')\n\n def set_up(self):\n local.pyapp.click('content=>设置')\n\n def set_up_of_account(self):\n local.pyapp.click('android=>new UiSelector().text(\"帐号、设备安全\")')\n\n def set_gesture_passwd(self):\n local.pyapp.click('content=>手势密码锁定')\n\n def create_gesture(self):\n local.pyapp.click('android=>new UiSelector().text(\"创建手势密码\")')\n\n # def set_gesture(self):\n # self.relieve_device_lock_qq(12)\n # time.sleep(1)\n # self.relieve_device_lock_qq(12)\n def set_gesture(self):\n element = local.pyapp.get_elements('class=>android.view.View')[12]\n location = element.location\n x = location['x']\n y = location['y']\n size = element.size\n width = size['width']\n height = size['height']\n sample_width = width/3/2\n sample_height = height/3/2\n onex= x+sample_width\n oney= y+sample_height\n twox = 
x + sample_width * 3\n twoy = y +sample_height\n threex = x + sample_width * 5\n threey = y + sample_width\n fourx = x + sample_width * 3\n foury = y + sample_width * 3\n TouchAction(local.driver).press(x=onex,y=oney).wait(300).move_to(x=twox-onex,y=twoy-oney).wait(300).move_to(x=threex-twox,y=threey-twoy).wait(300).move_to(x=fourx-threex,y=foury-threey).perform()\n\n def set_lock_check(self, name):\n return local.pyapp.wait_and_save_exception('android=>new UiSelector().text(\"修改手势密码\")', name)\n\nclass Page(SetLock):\n pass\n",
"step-ids": [
15,
16,
18,
20,
24
]
}
|
[
15,
16,
18,
20,
24
] |
<|reserved_special_token_0|>
def transform_to_my_format(data):
d = defaultdict(dict)
for i1, i2, i3 in re.findall('([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)',
data):
d[i1].update({i2: float(i3)})
return d
<|reserved_special_token_0|>
def dijkstra_latency(start, goal):
Graph_Lat = transform_to_my_format(data_from_file)
graph = Graph_Lat
shortest_distance = {}
predecessor = {}
unseenNodes = {}
unseenNodes = graph
infinity = 9999999
path = []
for node in unseenNodes:
shortest_distance[node] = infinity
shortest_distance[start] = 0
while unseenNodes:
minNode = None
for node in unseenNodes:
if minNode is None:
minNode = node
elif shortest_distance[node] < shortest_distance[minNode]:
minNode = node
for childNode, weight in graph[minNode].items():
if weight + shortest_distance[minNode] < shortest_distance[
childNode]:
shortest_distance[childNode] = weight + shortest_distance[
minNode]
predecessor[childNode] = minNode
unseenNodes.pop(minNode)
currentNode = goal
while currentNode != start:
try:
path.insert(0, currentNode)
currentNode = predecessor[currentNode]
except KeyError:
print('Path not reachable')
break
path.insert(0, start)
if shortest_distance[goal] != infinity:
dj2 = float(shortest_distance[goal]) * 1.1
dj3 = float(shortest_distance[goal]) * 1.2
f = open('output.txt', 'a+')
if int(start) != int(goal):
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '100' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '75' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '60' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
else:
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '0' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '0' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '0' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def transform_to_my_format(data):
d = defaultdict(dict)
for i1, i2, i3 in re.findall('([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)',
data):
d[i1].update({i2: float(i3)})
return d
<|reserved_special_token_0|>
def dijkstra_latency(start, goal):
Graph_Lat = transform_to_my_format(data_from_file)
graph = Graph_Lat
shortest_distance = {}
predecessor = {}
unseenNodes = {}
unseenNodes = graph
infinity = 9999999
path = []
for node in unseenNodes:
shortest_distance[node] = infinity
shortest_distance[start] = 0
while unseenNodes:
minNode = None
for node in unseenNodes:
if minNode is None:
minNode = node
elif shortest_distance[node] < shortest_distance[minNode]:
minNode = node
for childNode, weight in graph[minNode].items():
if weight + shortest_distance[minNode] < shortest_distance[
childNode]:
shortest_distance[childNode] = weight + shortest_distance[
minNode]
predecessor[childNode] = minNode
unseenNodes.pop(minNode)
currentNode = goal
while currentNode != start:
try:
path.insert(0, currentNode)
currentNode = predecessor[currentNode]
except KeyError:
print('Path not reachable')
break
path.insert(0, start)
if shortest_distance[goal] != infinity:
dj2 = float(shortest_distance[goal]) * 1.1
dj3 = float(shortest_distance[goal]) * 1.2
f = open('output.txt', 'a+')
if int(start) != int(goal):
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '100' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '75' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '60' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
else:
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '0' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '0' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '0' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.close()
<|reserved_special_token_0|>
for i in range(max):
for j in range(max):
dijkstra_latency(str(i), str(j))
<|reserved_special_token_0|>
for i in range(max):
f = open('output.txt', 'a+')
f.write('C' + str(i) + ',S' + str(i) + ',priceController,False' + '\n')
f.close()
for i in range(max):
f = open('output.txt', 'a+')
f.write('S' + str(i) + ',' + str(randint(10000, 500000)) + ',' + str(
randint(2, 10)) + '\n')
f.close()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
data_from_file = open('newAtmnet.txt', 'r').read()
def transform_to_my_format(data):
d = defaultdict(dict)
for i1, i2, i3 in re.findall('([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)',
data):
d[i1].update({i2: float(i3)})
return d
Graph_Lat = transform_to_my_format(data_from_file)
def dijkstra_latency(start, goal):
Graph_Lat = transform_to_my_format(data_from_file)
graph = Graph_Lat
shortest_distance = {}
predecessor = {}
unseenNodes = {}
unseenNodes = graph
infinity = 9999999
path = []
for node in unseenNodes:
shortest_distance[node] = infinity
shortest_distance[start] = 0
while unseenNodes:
minNode = None
for node in unseenNodes:
if minNode is None:
minNode = node
elif shortest_distance[node] < shortest_distance[minNode]:
minNode = node
for childNode, weight in graph[minNode].items():
if weight + shortest_distance[minNode] < shortest_distance[
childNode]:
shortest_distance[childNode] = weight + shortest_distance[
minNode]
predecessor[childNode] = minNode
unseenNodes.pop(minNode)
currentNode = goal
while currentNode != start:
try:
path.insert(0, currentNode)
currentNode = predecessor[currentNode]
except KeyError:
print('Path not reachable')
break
path.insert(0, start)
if shortest_distance[goal] != infinity:
dj2 = float(shortest_distance[goal]) * 1.1
dj3 = float(shortest_distance[goal]) * 1.2
f = open('output.txt', 'a+')
if int(start) != int(goal):
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '100' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '75' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '60' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
else:
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '0' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '0' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '0' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.close()
max = len(Graph_Lat)
for i in range(max):
for j in range(max):
dijkstra_latency(str(i), str(j))
max = len(Graph_Lat)
for i in range(max):
f = open('output.txt', 'a+')
f.write('C' + str(i) + ',S' + str(i) + ',priceController,False' + '\n')
f.close()
for i in range(max):
f = open('output.txt', 'a+')
f.write('S' + str(i) + ',' + str(randint(10000, 500000)) + ',' + str(
randint(2, 10)) + '\n')
f.close()
<|reserved_special_token_1|>
import re
from collections import defaultdict
from random import randint
data_from_file = open('newAtmnet.txt', 'r').read()
def transform_to_my_format(data):
d = defaultdict(dict)
for i1, i2, i3 in re.findall('([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)',
data):
d[i1].update({i2: float(i3)})
return d
Graph_Lat = transform_to_my_format(data_from_file)
def dijkstra_latency(start, goal):
Graph_Lat = transform_to_my_format(data_from_file)
graph = Graph_Lat
shortest_distance = {}
predecessor = {}
unseenNodes = {}
unseenNodes = graph
infinity = 9999999
path = []
for node in unseenNodes:
shortest_distance[node] = infinity
shortest_distance[start] = 0
while unseenNodes:
minNode = None
for node in unseenNodes:
if minNode is None:
minNode = node
elif shortest_distance[node] < shortest_distance[minNode]:
minNode = node
for childNode, weight in graph[minNode].items():
if weight + shortest_distance[minNode] < shortest_distance[
childNode]:
shortest_distance[childNode] = weight + shortest_distance[
minNode]
predecessor[childNode] = minNode
unseenNodes.pop(minNode)
currentNode = goal
while currentNode != start:
try:
path.insert(0, currentNode)
currentNode = predecessor[currentNode]
except KeyError:
print('Path not reachable')
break
path.insert(0, start)
if shortest_distance[goal] != infinity:
dj2 = float(shortest_distance[goal]) * 1.1
dj3 = float(shortest_distance[goal]) * 1.2
f = open('output.txt', 'a+')
if int(start) != int(goal):
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '100' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '75' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '60' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
else:
f.write('LC' + start + '_' + goal + ',' + '"LC' + start + '_' +
goal + '",' + str(shortest_distance[goal]) + ',' + '0' +
',"Claro",' + '"S' + start + '",' + '"S' + goal + '"' + '\n')
f.write('LM' + start + '_' + goal + ',' + '"LM' + start + '_' +
goal + '",' + str(dj2) + ',' + '0' + ',"Movistar",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.write('LT' + start + '_' + goal + ',' + '"LT' + start + '_' +
goal + '",' + str(dj3) + ',' + '0' + ',"Tigo",' + '"S' +
start + '",' + '"S' + goal + '"' + '\n')
f.close()
max = len(Graph_Lat)
for i in range(max):
for j in range(max):
dijkstra_latency(str(i), str(j))
max = len(Graph_Lat)
for i in range(max):
f = open('output.txt', 'a+')
f.write('C' + str(i) + ',S' + str(i) + ',priceController,False' + '\n')
f.close()
for i in range(max):
f = open('output.txt', 'a+')
f.write('S' + str(i) + ',' + str(randint(10000, 500000)) + ',' + str(
randint(2, 10)) + '\n')
f.close()
<|reserved_special_token_1|>
##############################################################################
# Name        : import.py
# Description : Takes the information from Transfom.sh (initial node,
#               final node) and the haversine formula.
#
# Parameters  :
# Written by  :
#
# CHANGE HISTORY:
# Richard Abuabara Caserta
#
##############################################################################
import re
from collections import defaultdict
#from pprint import pprint
from random import randint
data_from_file=open('newAtmnet.txt', 'r').read()
def transform_to_my_format(data):
d = defaultdict(dict)
for (i1, i2, i3) in re.findall(r'([\d\.]+)\s+([\d\.]+)\s+([\d\.]+)', data):
d[i1].update({i2: float(i3)})
return d
Graph_Lat=transform_to_my_format(data_from_file)
def dijkstra_latency(start,goal):
Graph_Lat=transform_to_my_format(data_from_file)
graph=Graph_Lat
shortest_distance = {}
predecessor = {}
unseenNodes= {}
unseenNodes = graph
infinity = 9999999
path = []
for node in unseenNodes:
shortest_distance[node] = infinity
shortest_distance[start] = 0
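    # main Dijkstra loop: settle the unvisited node with the smallest tentative distance and relax its neighbours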
while unseenNodes:
minNode = None
for node in unseenNodes:
if minNode is None:
minNode = node
elif shortest_distance[node] < shortest_distance[minNode]:
minNode = node
for childNode, weight in graph[minNode].items():
if weight + shortest_distance[minNode] < shortest_distance[childNode]:
shortest_distance[childNode] = weight + shortest_distance[minNode]
predecessor[childNode] = minNode
unseenNodes.pop(minNode)
currentNode = goal
while currentNode != start:
try:
path.insert(0,currentNode)
currentNode = predecessor[currentNode]
except KeyError:
print('Path not reachable')
break
path.insert(0,start)
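    # append one output line per carrier (Claro/Movistar/Tigo); the 2nd and 3rd carriers get +10%/+20% latency and lower capacity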
if shortest_distance[goal] != infinity:
        dj2=float(shortest_distance[goal])*1.1 # Latency +/- 10
        dj3=float(shortest_distance[goal])*1.2 # Price +/- 20, double-check this
f= open("output.txt","a+")
if (int(start) != int(goal)):
f.write('LC'+start+'_'+goal+','+'"LC'+start+'_'+goal+'",'+str(shortest_distance[goal])+','+'100'+',"Claro",'+'"S'+start+'",'+'"S'+goal+'"'+ "\n")
f.write('LM'+start+'_'+goal+','+'"LM'+start+'_'+goal+'",'+str(dj2)+','+'75'+',"Movistar",'+'"S'+start+'",'+'"S'+goal+'"'+ "\n")
f.write('LT'+start+'_'+goal+','+'"LT'+start+'_'+goal+'",'+str(dj3)+','+'60'+',"Tigo",'+'"S'+start+'",'+'"S'+goal+'"'+ "\n")
#f.write('mynet.addLink(LT'+start+'_'+goal+')'+ "\n")
else:
f.write('LC'+start+'_'+goal+','+'"LC'+start+'_'+goal+'",'+str(shortest_distance[goal])+','+'0'+',"Claro",'+'"S'+start+'",'+'"S'+goal+'"'+ "\n")
f.write('LM'+start+'_'+goal+','+'"LM'+start+'_'+goal+'",'+str(dj2)+','+'0'+',"Movistar",'+'"S'+start+'",'+'"S'+goal+'"'+ "\n")
f.write('LT'+start+'_'+goal+','+'"LT'+start+'_'+goal+'",'+str(dj3)+','+'0'+',"Tigo",'+'"S'+start+'",'+'"S'+goal+'"'+ "\n")
#f.write('mynet.addLink(LT'+start+'_'+goal+')'+ "\n")
f.close()
#### printing module ######
max=(len(Graph_Lat))
for i in range(max): # loop over source nodes
    #print (i)
    for j in range(max):
        dijkstra_latency(str(i), str(j))
        # should print e.g. L571=Link("L571",77,770,"operador1",5,7)
######## print 2nd requirement ################
max=(len(Graph_Lat))
for i in range(max): # loop over source nodes
f= open("output.txt","a+")
f.write('C'+str(i)+',S'+str(i)+',priceController,False'+"\n")
f.close()
#Switch creation and aggregation
for i in range(max): # loop over source nodes
f= open("output.txt","a+")
#f.write('S'+str(i)+' = Switch("S'+str(i)+'", '+str(randint(10000,500000))+', "C'+str(i)+'", '+str(randint(2,10))+')'+"\n")
f.write('S'+str(i)+','+str(randint(10000,500000))+','+str(randint(2,10))+"\n")
f.close()
#S0 = Switch("S0", randint(10000,500000), "C0", randint(2,10))
#mynet.addSwitch(S0)
|
flexible
|
{
"blob_id": "0018cbb1d945ad1b6469804e7993afee44406fd1",
"index": 2895,
"step-1": "<mask token>\n\n\ndef transform_to_my_format(data):\n d = defaultdict(dict)\n for i1, i2, i3 in re.findall('([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)',\n data):\n d[i1].update({i2: float(i3)})\n return d\n\n\n<mask token>\n\n\ndef dijkstra_latency(start, goal):\n Graph_Lat = transform_to_my_format(data_from_file)\n graph = Graph_Lat\n shortest_distance = {}\n predecessor = {}\n unseenNodes = {}\n unseenNodes = graph\n infinity = 9999999\n path = []\n for node in unseenNodes:\n shortest_distance[node] = infinity\n shortest_distance[start] = 0\n while unseenNodes:\n minNode = None\n for node in unseenNodes:\n if minNode is None:\n minNode = node\n elif shortest_distance[node] < shortest_distance[minNode]:\n minNode = node\n for childNode, weight in graph[minNode].items():\n if weight + shortest_distance[minNode] < shortest_distance[\n childNode]:\n shortest_distance[childNode] = weight + shortest_distance[\n minNode]\n predecessor[childNode] = minNode\n unseenNodes.pop(minNode)\n currentNode = goal\n while currentNode != start:\n try:\n path.insert(0, currentNode)\n currentNode = predecessor[currentNode]\n except KeyError:\n print('Path not reachable')\n break\n path.insert(0, start)\n if shortest_distance[goal] != infinity:\n dj2 = float(shortest_distance[goal]) * 1.1\n dj3 = float(shortest_distance[goal]) * 1.2\n f = open('output.txt', 'a+')\n if int(start) != int(goal):\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '100' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '75' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '60' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n else:\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '0' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '0' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '0' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef transform_to_my_format(data):\n d = defaultdict(dict)\n for i1, i2, i3 in re.findall('([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)',\n data):\n d[i1].update({i2: float(i3)})\n return d\n\n\n<mask token>\n\n\ndef dijkstra_latency(start, goal):\n Graph_Lat = transform_to_my_format(data_from_file)\n graph = Graph_Lat\n shortest_distance = {}\n predecessor = {}\n unseenNodes = {}\n unseenNodes = graph\n infinity = 9999999\n path = []\n for node in unseenNodes:\n shortest_distance[node] = infinity\n shortest_distance[start] = 0\n while unseenNodes:\n minNode = None\n for node in unseenNodes:\n if minNode is None:\n minNode = node\n elif shortest_distance[node] < shortest_distance[minNode]:\n minNode = node\n for childNode, weight in graph[minNode].items():\n if weight + shortest_distance[minNode] < shortest_distance[\n childNode]:\n shortest_distance[childNode] = weight + shortest_distance[\n minNode]\n predecessor[childNode] = minNode\n unseenNodes.pop(minNode)\n currentNode = goal\n while currentNode != start:\n try:\n path.insert(0, currentNode)\n currentNode = predecessor[currentNode]\n except KeyError:\n print('Path not reachable')\n break\n path.insert(0, start)\n if shortest_distance[goal] != infinity:\n dj2 = float(shortest_distance[goal]) * 1.1\n dj3 = float(shortest_distance[goal]) * 1.2\n f = open('output.txt', 'a+')\n if int(start) != int(goal):\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '100' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '75' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '60' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n else:\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '0' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '0' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '0' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.close()\n\n\n<mask token>\nfor i in range(max):\n for j in range(max):\n dijkstra_latency(str(i), str(j))\n<mask token>\nfor i in range(max):\n f = open('output.txt', 'a+')\n f.write('C' + str(i) + ',S' + str(i) + ',priceController,False' + '\\n')\n f.close()\nfor i in range(max):\n f = open('output.txt', 'a+')\n f.write('S' + str(i) + ',' + str(randint(10000, 500000)) + ',' + str(\n randint(2, 10)) + '\\n')\n f.close()\n",
"step-3": "<mask token>\ndata_from_file = open('newAtmnet.txt', 'r').read()\n\n\ndef transform_to_my_format(data):\n d = defaultdict(dict)\n for i1, i2, i3 in re.findall('([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)',\n data):\n d[i1].update({i2: float(i3)})\n return d\n\n\nGraph_Lat = transform_to_my_format(data_from_file)\n\n\ndef dijkstra_latency(start, goal):\n Graph_Lat = transform_to_my_format(data_from_file)\n graph = Graph_Lat\n shortest_distance = {}\n predecessor = {}\n unseenNodes = {}\n unseenNodes = graph\n infinity = 9999999\n path = []\n for node in unseenNodes:\n shortest_distance[node] = infinity\n shortest_distance[start] = 0\n while unseenNodes:\n minNode = None\n for node in unseenNodes:\n if minNode is None:\n minNode = node\n elif shortest_distance[node] < shortest_distance[minNode]:\n minNode = node\n for childNode, weight in graph[minNode].items():\n if weight + shortest_distance[minNode] < shortest_distance[\n childNode]:\n shortest_distance[childNode] = weight + shortest_distance[\n minNode]\n predecessor[childNode] = minNode\n unseenNodes.pop(minNode)\n currentNode = goal\n while currentNode != start:\n try:\n path.insert(0, currentNode)\n currentNode = predecessor[currentNode]\n except KeyError:\n print('Path not reachable')\n break\n path.insert(0, start)\n if shortest_distance[goal] != infinity:\n dj2 = float(shortest_distance[goal]) * 1.1\n dj3 = float(shortest_distance[goal]) * 1.2\n f = open('output.txt', 'a+')\n if int(start) != int(goal):\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '100' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '75' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '60' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n else:\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '0' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '0' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '0' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.close()\n\n\nmax = len(Graph_Lat)\nfor i in range(max):\n for j in range(max):\n dijkstra_latency(str(i), str(j))\nmax = len(Graph_Lat)\nfor i in range(max):\n f = open('output.txt', 'a+')\n f.write('C' + str(i) + ',S' + str(i) + ',priceController,False' + '\\n')\n f.close()\nfor i in range(max):\n f = open('output.txt', 'a+')\n f.write('S' + str(i) + ',' + str(randint(10000, 500000)) + ',' + str(\n randint(2, 10)) + '\\n')\n f.close()\n",
"step-4": "import re\nfrom collections import defaultdict\nfrom random import randint\ndata_from_file = open('newAtmnet.txt', 'r').read()\n\n\ndef transform_to_my_format(data):\n d = defaultdict(dict)\n for i1, i2, i3 in re.findall('([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)\\\\s+([\\\\d\\\\.]+)',\n data):\n d[i1].update({i2: float(i3)})\n return d\n\n\nGraph_Lat = transform_to_my_format(data_from_file)\n\n\ndef dijkstra_latency(start, goal):\n Graph_Lat = transform_to_my_format(data_from_file)\n graph = Graph_Lat\n shortest_distance = {}\n predecessor = {}\n unseenNodes = {}\n unseenNodes = graph\n infinity = 9999999\n path = []\n for node in unseenNodes:\n shortest_distance[node] = infinity\n shortest_distance[start] = 0\n while unseenNodes:\n minNode = None\n for node in unseenNodes:\n if minNode is None:\n minNode = node\n elif shortest_distance[node] < shortest_distance[minNode]:\n minNode = node\n for childNode, weight in graph[minNode].items():\n if weight + shortest_distance[minNode] < shortest_distance[\n childNode]:\n shortest_distance[childNode] = weight + shortest_distance[\n minNode]\n predecessor[childNode] = minNode\n unseenNodes.pop(minNode)\n currentNode = goal\n while currentNode != start:\n try:\n path.insert(0, currentNode)\n currentNode = predecessor[currentNode]\n except KeyError:\n print('Path not reachable')\n break\n path.insert(0, start)\n if shortest_distance[goal] != infinity:\n dj2 = float(shortest_distance[goal]) * 1.1\n dj3 = float(shortest_distance[goal]) * 1.2\n f = open('output.txt', 'a+')\n if int(start) != int(goal):\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '100' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '75' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '60' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n else:\n f.write('LC' + start + '_' + goal + ',' + '\"LC' + start + '_' +\n goal + '\",' + str(shortest_distance[goal]) + ',' + '0' +\n ',\"Claro\",' + '\"S' + start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LM' + start + '_' + goal + ',' + '\"LM' + start + '_' +\n goal + '\",' + str(dj2) + ',' + '0' + ',\"Movistar\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.write('LT' + start + '_' + goal + ',' + '\"LT' + start + '_' +\n goal + '\",' + str(dj3) + ',' + '0' + ',\"Tigo\",' + '\"S' +\n start + '\",' + '\"S' + goal + '\"' + '\\n')\n f.close()\n\n\nmax = len(Graph_Lat)\nfor i in range(max):\n for j in range(max):\n dijkstra_latency(str(i), str(j))\nmax = len(Graph_Lat)\nfor i in range(max):\n f = open('output.txt', 'a+')\n f.write('C' + str(i) + ',S' + str(i) + ',priceController,False' + '\\n')\n f.close()\nfor i in range(max):\n f = open('output.txt', 'a+')\n f.write('S' + str(i) + ',' + str(randint(10000, 500000)) + ',' + str(\n randint(2, 10)) + '\\n')\n f.close()\n",
"step-5": "##############################################################################\n# Nombre : import.py\n# Descripción : It takes the information from Transfom.sh Initial Node\n# Final Node and HAVERSINE Formule\n# \n# Parámetros:\n# Realizado Por : \n#\n# HISTORIAL DE CAMBIOS:\n#Richard Abuabara Caserta\n# \n##############################################################################\nimport re\nfrom collections import defaultdict\n#from pprint import pprint\nfrom random import randint\n\ndata_from_file=open('newAtmnet.txt', 'r').read()\n\ndef transform_to_my_format(data):\n d = defaultdict(dict)\n for (i1, i2, i3) in re.findall(r'([\\d\\.]+)\\s+([\\d\\.]+)\\s+([\\d\\.]+)', data):\n d[i1].update({i2: float(i3)})\n return d\n\nGraph_Lat=transform_to_my_format(data_from_file)\n\ndef dijkstra_latency(start,goal):\n Graph_Lat=transform_to_my_format(data_from_file)\n graph=Graph_Lat\n shortest_distance = {}\n predecessor = {}\n unseenNodes= {}\n unseenNodes = graph\n infinity = 9999999\n path = []\n \n for node in unseenNodes:\n shortest_distance[node] = infinity\n shortest_distance[start] = 0\n \n while unseenNodes:\n minNode = None\n for node in unseenNodes:\n if minNode is None:\n minNode = node\n elif shortest_distance[node] < shortest_distance[minNode]:\n minNode = node\n \n for childNode, weight in graph[minNode].items():\n if weight + shortest_distance[minNode] < shortest_distance[childNode]:\n shortest_distance[childNode] = weight + shortest_distance[minNode]\n predecessor[childNode] = minNode\n unseenNodes.pop(minNode)\n \n currentNode = goal\n while currentNode != start:\n try:\n path.insert(0,currentNode)\n currentNode = predecessor[currentNode]\n except KeyError:\n print('Path not reachable')\n break\n path.insert(0,start)\n if shortest_distance[goal] != infinity:\n dj2=float(shortest_distance[goal])*1.1 #Latencia +/- 10\n dj3=float(shortest_distance[goal])*1.2 #Price +/- 20 Verificar ojooo\n f= open(\"output.txt\",\"a+\")\n if (int(start) != int(goal)):\n f.write('LC'+start+'_'+goal+','+'\"LC'+start+'_'+goal+'\",'+str(shortest_distance[goal])+','+'100'+',\"Claro\",'+'\"S'+start+'\",'+'\"S'+goal+'\"'+ \"\\n\")\n f.write('LM'+start+'_'+goal+','+'\"LM'+start+'_'+goal+'\",'+str(dj2)+','+'75'+',\"Movistar\",'+'\"S'+start+'\",'+'\"S'+goal+'\"'+ \"\\n\")\n f.write('LT'+start+'_'+goal+','+'\"LT'+start+'_'+goal+'\",'+str(dj3)+','+'60'+',\"Tigo\",'+'\"S'+start+'\",'+'\"S'+goal+'\"'+ \"\\n\")\n #f.write('mynet.addLink(LT'+start+'_'+goal+')'+ \"\\n\")\n else:\n f.write('LC'+start+'_'+goal+','+'\"LC'+start+'_'+goal+'\",'+str(shortest_distance[goal])+','+'0'+',\"Claro\",'+'\"S'+start+'\",'+'\"S'+goal+'\"'+ \"\\n\")\n f.write('LM'+start+'_'+goal+','+'\"LM'+start+'_'+goal+'\",'+str(dj2)+','+'0'+',\"Movistar\",'+'\"S'+start+'\",'+'\"S'+goal+'\"'+ \"\\n\")\n f.write('LT'+start+'_'+goal+','+'\"LT'+start+'_'+goal+'\",'+str(dj3)+','+'0'+',\"Tigo\",'+'\"S'+start+'\",'+'\"S'+goal+'\"'+ \"\\n\")\n #f.write('mynet.addLink(LT'+start+'_'+goal+')'+ \"\\n\")\n \n f.close()\n\n####modulo impresion######\nmax=(len(Graph_Lat))\nfor i in range(max): #este es el for - source\n #print (i)\n for j in range(max):\n dijkstra_latency(str(i), str(j)) \n\t#debo imprimir L571=Link(\"L571\",77,770,\"operador1\",5,7) \n########Imprimir 2do Rquerimiento################\n\nmax=(len(Graph_Lat))\n\nfor i in range(max): #este es el for - source\n f= open(\"output.txt\",\"a+\")\n f.write('C'+str(i)+',S'+str(i)+',priceController,False'+\"\\n\")\n f.close()\n\n\n#Switch creation and aggregation\nfor i in range(max): #este 
es el for - source\n f= open(\"output.txt\",\"a+\")\n #f.write('S'+str(i)+' = Switch(\"S'+str(i)+'\", '+str(randint(10000,500000))+', \"C'+str(i)+'\", '+str(randint(2,10))+')'+\"\\n\")\n f.write('S'+str(i)+','+str(randint(10000,500000))+','+str(randint(2,10))+\"\\n\")\n f.close()\n\n#S0 = Switch(\"S0\", randint(10000,500000), \"C0\", randint(2,10))\n#mynet.addSwitch(S0) \n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print('INR :Rs.', inr, '/-')
<|reserved_special_token_1|>
rate = 69
dollar = int(input('enter an dollars to convert:'))
inr = dollar * rate
print('INR :Rs.', inr, '/-')
<|reserved_special_token_1|>
rate=69
dollar=int(input("enter an dollars to convert:"))
inr=dollar*rate
print('INR :Rs.',inr,'/-')
|
flexible
|
{
"blob_id": "62018b32bf0c66fa7ec3cc0fcbdc16e28b4ef2d6",
"index": 2396,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('INR :Rs.', inr, '/-')\n",
"step-3": "rate = 69\ndollar = int(input('enter an dollars to convert:'))\ninr = dollar * rate\nprint('INR :Rs.', inr, '/-')\n",
"step-4": "rate=69\ndollar=int(input(\"enter an dollars to convert:\"))\ninr=dollar*rate\nprint('INR :Rs.',inr,'/-')",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class testPyTaLib(unittest.TestCase):
def setUp(self):
pass
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class testPyTaLib(unittest.TestCase):
def setUp(self):
pass
<|reserved_special_token_0|>
def testSma(self):
sma = Sma(period=3)
expectedAvgs = [1, 1.5, 2, 3, 4]
for index, number in enumerate(range(1, 6)):
self.assertEqual(expectedAvgs[index], sma(number))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class testPyTaLib(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testSma(self):
sma = Sma(period=3)
expectedAvgs = [1, 1.5, 2, 3, 4]
for index, number in enumerate(range(1, 6)):
self.assertEqual(expectedAvgs[index], sma(number))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import unittest
from ultrafinance.pyTaLib.indicator import Sma
class testPyTaLib(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testSma(self):
sma = Sma(period=3)
expectedAvgs = [1, 1.5, 2, 3, 4]
for index, number in enumerate(range(1, 6)):
self.assertEqual(expectedAvgs[index], sma(number))
<|reserved_special_token_1|>
'''
Created on Dec 18, 2011
@author: ppa
'''
import unittest
from ultrafinance.pyTaLib.indicator import Sma
class testPyTaLib(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testSma(self):
sma = Sma(period = 3)
expectedAvgs = [1, 1.5, 2, 3, 4]
for index, number in enumerate(range(1, 6) ):
self.assertEqual(expectedAvgs[index], sma(number))
|
flexible
|
{
"blob_id": "fcd2bd91dff3193c661d71ade8039765f8498fd4",
"index": 8317,
"step-1": "<mask token>\n\n\nclass testPyTaLib(unittest.TestCase):\n\n def setUp(self):\n pass\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass testPyTaLib(unittest.TestCase):\n\n def setUp(self):\n pass\n <mask token>\n\n def testSma(self):\n sma = Sma(period=3)\n expectedAvgs = [1, 1.5, 2, 3, 4]\n for index, number in enumerate(range(1, 6)):\n self.assertEqual(expectedAvgs[index], sma(number))\n",
"step-3": "<mask token>\n\n\nclass testPyTaLib(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testSma(self):\n sma = Sma(period=3)\n expectedAvgs = [1, 1.5, 2, 3, 4]\n for index, number in enumerate(range(1, 6)):\n self.assertEqual(expectedAvgs[index], sma(number))\n",
"step-4": "<mask token>\nimport unittest\nfrom ultrafinance.pyTaLib.indicator import Sma\n\n\nclass testPyTaLib(unittest.TestCase):\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testSma(self):\n sma = Sma(period=3)\n expectedAvgs = [1, 1.5, 2, 3, 4]\n for index, number in enumerate(range(1, 6)):\n self.assertEqual(expectedAvgs[index], sma(number))\n",
"step-5": "'''\nCreated on Dec 18, 2011\n\n@author: ppa\n'''\nimport unittest\nfrom ultrafinance.pyTaLib.indicator import Sma\n\nclass testPyTaLib(unittest.TestCase):\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testSma(self):\n sma = Sma(period = 3)\n expectedAvgs = [1, 1.5, 2, 3, 4]\n for index, number in enumerate(range(1, 6) ):\n self.assertEqual(expectedAvgs[index], sma(number))\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class cnn:
def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=
300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,
epochs=20, output_dim=2, dropout=0.1, trainable=False):
self.epochs = epochs
self.batch_size = batch_size
model = Sequential()
if not isinstance(embedweight, type(None)):
model.add(Embedding(max_voc, embedding_dims, input_length=
maxlen, weights=[embedweight], trainable=trainable))
else:
model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))
model.add(Dropout(dropout))
model.add(Conv1D(filters, conv_kernel, padding='valid', activation=
'relu', strides=1))
model.add(GlobalMaxPooling1D())
model.add(Dense(hidden_dim))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(512))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(128))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(output_dim))
model.add(Activation('softmax'))
opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)
model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[
'accuracy'])
self.model = model
<|reserved_special_token_0|>
def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,
earlyStopping=True):
callback_ = None
if earlyStopping:
callback_ = EarlyStopping(monitor='val_loss', patience=10)
if class_weight:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
class_weight=class_weight, shuffle=True, callbacks=[callback_])
else:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
shuffle=True, callbacks=[callback_])
<|reserved_special_token_0|>
def save_model(self, fpath):
self.model.save(fpath)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class cnn:
def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=
300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,
epochs=20, output_dim=2, dropout=0.1, trainable=False):
self.epochs = epochs
self.batch_size = batch_size
model = Sequential()
if not isinstance(embedweight, type(None)):
model.add(Embedding(max_voc, embedding_dims, input_length=
maxlen, weights=[embedweight], trainable=trainable))
else:
model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))
model.add(Dropout(dropout))
model.add(Conv1D(filters, conv_kernel, padding='valid', activation=
'relu', strides=1))
model.add(GlobalMaxPooling1D())
model.add(Dense(hidden_dim))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(512))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(128))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(output_dim))
model.add(Activation('softmax'))
opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)
model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[
'accuracy'])
self.model = model
<|reserved_special_token_0|>
def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,
earlyStopping=True):
callback_ = None
if earlyStopping:
callback_ = EarlyStopping(monitor='val_loss', patience=10)
if class_weight:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
class_weight=class_weight, shuffle=True, callbacks=[callback_])
else:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
shuffle=True, callbacks=[callback_])
<|reserved_special_token_0|>
def save_model(self, fpath):
self.model.save(fpath)
def predict(self, test_x):
return self.model.predict(test_x)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class cnn:
def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=
300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,
epochs=20, output_dim=2, dropout=0.1, trainable=False):
self.epochs = epochs
self.batch_size = batch_size
model = Sequential()
if not isinstance(embedweight, type(None)):
model.add(Embedding(max_voc, embedding_dims, input_length=
maxlen, weights=[embedweight], trainable=trainable))
else:
model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))
model.add(Dropout(dropout))
model.add(Conv1D(filters, conv_kernel, padding='valid', activation=
'relu', strides=1))
model.add(GlobalMaxPooling1D())
model.add(Dense(hidden_dim))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(512))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(128))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(output_dim))
model.add(Activation('softmax'))
opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)
model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[
'accuracy'])
self.model = model
@staticmethod
def padding(x, maxlen):
return sequence.pad_sequences(x, maxlen=maxlen)
def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,
earlyStopping=True):
callback_ = None
if earlyStopping:
callback_ = EarlyStopping(monitor='val_loss', patience=10)
if class_weight:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
class_weight=class_weight, shuffle=True, callbacks=[callback_])
else:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
shuffle=True, callbacks=[callback_])
def load_weight(self, fadd):
self.model.load_weights(fadd)
def save_model(self, fpath):
self.model.save(fpath)
def predict(self, test_x):
return self.model.predict(test_x)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import Conv1D, GlobalMaxPooling1D
from keras import optimizers
from keras.callbacks import EarlyStopping
class cnn:
def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=
300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,
epochs=20, output_dim=2, dropout=0.1, trainable=False):
self.epochs = epochs
self.batch_size = batch_size
model = Sequential()
if not isinstance(embedweight, type(None)):
model.add(Embedding(max_voc, embedding_dims, input_length=
maxlen, weights=[embedweight], trainable=trainable))
else:
model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))
model.add(Dropout(dropout))
model.add(Conv1D(filters, conv_kernel, padding='valid', activation=
'relu', strides=1))
model.add(GlobalMaxPooling1D())
model.add(Dense(hidden_dim))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(512))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(128))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(output_dim))
model.add(Activation('softmax'))
opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)
model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[
'accuracy'])
self.model = model
@staticmethod
def padding(x, maxlen):
return sequence.pad_sequences(x, maxlen=maxlen)
def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,
earlyStopping=True):
callback_ = None
if earlyStopping:
callback_ = EarlyStopping(monitor='val_loss', patience=10)
if class_weight:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
class_weight=class_weight, shuffle=True, callbacks=[callback_])
else:
self.model.fit(x_train, y_train, batch_size=self.batch_size,
epochs=self.epochs, validation_data=(x_valid, y_valid),
shuffle=True, callbacks=[callback_])
def load_weight(self, fadd):
self.model.load_weights(fadd)
def save_model(self, fpath):
self.model.save(fpath)
def predict(self, test_x):
return self.model.predict(test_x)
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 10 12:18:06 2017
@author: wqmike123
"""
#%% build a simple CNN with gloVec as initial
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import Conv1D, GlobalMaxPooling1D
from keras import optimizers
from keras.callbacks import EarlyStopping
#%%
class cnn:
def __init__(self,maxlen,max_voc,embedweight = None,embedding_dims = 300, batch_size = 30,\
filters = 1024, conv_kernel = 3,hidden_dim = 2048,epochs = 20,\
output_dim = 2,dropout = 0.1,trainable=False):
self.epochs = epochs
self.batch_size = batch_size
model = Sequential()
# we start off with an efficient embedding layer which maps
# our vocab indices into embedding_dims dimensions
if not isinstance(embedweight,type(None)):
model.add(Embedding(max_voc,
embedding_dims,
input_length=maxlen,weights = [embedweight],trainable = trainable))
else:
model.add(Embedding(max_voc,
embedding_dims,
input_length=maxlen))
model.add(Dropout(dropout))
# we add a Convolution1D, which will learn filters
# word group filters of size filter_length:
model.add(Conv1D(filters,
conv_kernel,
padding='valid',
activation='relu',
strides=1))
# we use max pooling:
model.add(GlobalMaxPooling1D())
# We add a vanilla hidden layer:
model.add(Dense(hidden_dim))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(512))
model.add(Dropout(dropout))
model.add(Activation('relu'))
model.add(Dense(128))
model.add(Dropout(dropout))
model.add(Activation('relu'))
# We project onto a single unit output layer, and squash it with a sigmoid:
model.add(Dense(output_dim))
model.add(Activation('softmax'))
opt = optimizers.SGD(lr=0.1,decay = 1e-4,momentum=0.9) #optimizers.adam(lr=0.01, decay=1e-6)
model.compile(loss='binary_crossentropy',
optimizer=opt,
metrics=['accuracy'])
self.model = model
@staticmethod
def padding(x,maxlen):
return sequence.pad_sequences(x, maxlen=maxlen)
def fit(self,x_train,y_train,x_valid,y_valid,class_weight = None,earlyStopping = True):
callback_ = None
if earlyStopping:
callback_ = EarlyStopping(monitor='val_loss', patience=10)
if class_weight:
self.model.fit(x_train, y_train,
batch_size=self.batch_size,
epochs=self.epochs,
validation_data=(x_valid, y_valid),class_weight = class_weight, shuffle=True,callbacks=[callback_])
else:
self.model.fit(x_train, y_train,
batch_size=self.batch_size,
epochs=self.epochs,
validation_data=(x_valid, y_valid), shuffle=True,callbacks=[callback_])
# def fit(self,x_train,y_train,x_valid,y_valid,class_weight = None):
# if class_weight:
# self.model.fit(x_train, y_train,
# batch_size=self.batch_size,
# epochs=self.epochs,
# validation_data=(x_valid, y_valid),class_weight = class_weight)
# else:
# self.model.fit(x_train, y_train,
# batch_size=self.batch_size,
# epochs=self.epochs,
# validation_data=(x_valid, y_valid))
def load_weight(self,fadd):
self.model.load_weights(fadd)
def save_model(self,fpath):
self.model.save(fpath)
def predict(self,test_x):
return self.model.predict(test_x)
|
flexible
|
{
"blob_id": "e235be879cf8a00eb9f39f90859689a29b26f1c6",
"index": 3161,
"step-1": "<mask token>\n\n\nclass cnn:\n\n def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=\n 300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,\n epochs=20, output_dim=2, dropout=0.1, trainable=False):\n self.epochs = epochs\n self.batch_size = batch_size\n model = Sequential()\n if not isinstance(embedweight, type(None)):\n model.add(Embedding(max_voc, embedding_dims, input_length=\n maxlen, weights=[embedweight], trainable=trainable))\n else:\n model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))\n model.add(Dropout(dropout))\n model.add(Conv1D(filters, conv_kernel, padding='valid', activation=\n 'relu', strides=1))\n model.add(GlobalMaxPooling1D())\n model.add(Dense(hidden_dim))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(512))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(128))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(output_dim))\n model.add(Activation('softmax'))\n opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)\n model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[\n 'accuracy'])\n self.model = model\n <mask token>\n\n def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,\n earlyStopping=True):\n callback_ = None\n if earlyStopping:\n callback_ = EarlyStopping(monitor='val_loss', patience=10)\n if class_weight:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n class_weight=class_weight, shuffle=True, callbacks=[callback_])\n else:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n shuffle=True, callbacks=[callback_])\n <mask token>\n\n def save_model(self, fpath):\n self.model.save(fpath)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass cnn:\n\n def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=\n 300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,\n epochs=20, output_dim=2, dropout=0.1, trainable=False):\n self.epochs = epochs\n self.batch_size = batch_size\n model = Sequential()\n if not isinstance(embedweight, type(None)):\n model.add(Embedding(max_voc, embedding_dims, input_length=\n maxlen, weights=[embedweight], trainable=trainable))\n else:\n model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))\n model.add(Dropout(dropout))\n model.add(Conv1D(filters, conv_kernel, padding='valid', activation=\n 'relu', strides=1))\n model.add(GlobalMaxPooling1D())\n model.add(Dense(hidden_dim))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(512))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(128))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(output_dim))\n model.add(Activation('softmax'))\n opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)\n model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[\n 'accuracy'])\n self.model = model\n <mask token>\n\n def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,\n earlyStopping=True):\n callback_ = None\n if earlyStopping:\n callback_ = EarlyStopping(monitor='val_loss', patience=10)\n if class_weight:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n class_weight=class_weight, shuffle=True, callbacks=[callback_])\n else:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n shuffle=True, callbacks=[callback_])\n <mask token>\n\n def save_model(self, fpath):\n self.model.save(fpath)\n\n def predict(self, test_x):\n return self.model.predict(test_x)\n",
"step-3": "<mask token>\n\n\nclass cnn:\n\n def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=\n 300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,\n epochs=20, output_dim=2, dropout=0.1, trainable=False):\n self.epochs = epochs\n self.batch_size = batch_size\n model = Sequential()\n if not isinstance(embedweight, type(None)):\n model.add(Embedding(max_voc, embedding_dims, input_length=\n maxlen, weights=[embedweight], trainable=trainable))\n else:\n model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))\n model.add(Dropout(dropout))\n model.add(Conv1D(filters, conv_kernel, padding='valid', activation=\n 'relu', strides=1))\n model.add(GlobalMaxPooling1D())\n model.add(Dense(hidden_dim))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(512))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(128))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(output_dim))\n model.add(Activation('softmax'))\n opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)\n model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[\n 'accuracy'])\n self.model = model\n\n @staticmethod\n def padding(x, maxlen):\n return sequence.pad_sequences(x, maxlen=maxlen)\n\n def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,\n earlyStopping=True):\n callback_ = None\n if earlyStopping:\n callback_ = EarlyStopping(monitor='val_loss', patience=10)\n if class_weight:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n class_weight=class_weight, shuffle=True, callbacks=[callback_])\n else:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n shuffle=True, callbacks=[callback_])\n\n def load_weight(self, fadd):\n self.model.load_weights(fadd)\n\n def save_model(self, fpath):\n self.model.save(fpath)\n\n def predict(self, test_x):\n return self.model.predict(test_x)\n",
"step-4": "<mask token>\nfrom keras.preprocessing import sequence\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation\nfrom keras.layers import Embedding\nfrom keras.layers import Conv1D, GlobalMaxPooling1D\nfrom keras import optimizers\nfrom keras.callbacks import EarlyStopping\n\n\nclass cnn:\n\n def __init__(self, maxlen, max_voc, embedweight=None, embedding_dims=\n 300, batch_size=30, filters=1024, conv_kernel=3, hidden_dim=2048,\n epochs=20, output_dim=2, dropout=0.1, trainable=False):\n self.epochs = epochs\n self.batch_size = batch_size\n model = Sequential()\n if not isinstance(embedweight, type(None)):\n model.add(Embedding(max_voc, embedding_dims, input_length=\n maxlen, weights=[embedweight], trainable=trainable))\n else:\n model.add(Embedding(max_voc, embedding_dims, input_length=maxlen))\n model.add(Dropout(dropout))\n model.add(Conv1D(filters, conv_kernel, padding='valid', activation=\n 'relu', strides=1))\n model.add(GlobalMaxPooling1D())\n model.add(Dense(hidden_dim))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(512))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(128))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n model.add(Dense(output_dim))\n model.add(Activation('softmax'))\n opt = optimizers.SGD(lr=0.1, decay=0.0001, momentum=0.9)\n model.compile(loss='binary_crossentropy', optimizer=opt, metrics=[\n 'accuracy'])\n self.model = model\n\n @staticmethod\n def padding(x, maxlen):\n return sequence.pad_sequences(x, maxlen=maxlen)\n\n def fit(self, x_train, y_train, x_valid, y_valid, class_weight=None,\n earlyStopping=True):\n callback_ = None\n if earlyStopping:\n callback_ = EarlyStopping(monitor='val_loss', patience=10)\n if class_weight:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n class_weight=class_weight, shuffle=True, callbacks=[callback_])\n else:\n self.model.fit(x_train, y_train, batch_size=self.batch_size,\n epochs=self.epochs, validation_data=(x_valid, y_valid),\n shuffle=True, callbacks=[callback_])\n\n def load_weight(self, fadd):\n self.model.load_weights(fadd)\n\n def save_model(self, fpath):\n self.model.save(fpath)\n\n def predict(self, test_x):\n return self.model.predict(test_x)\n",
"step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun Sep 10 12:18:06 2017\n\n@author: wqmike123\n\"\"\"\n#%% build a simple CNN with gloVec as initial\nfrom keras.preprocessing import sequence\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Dropout, Activation\nfrom keras.layers import Embedding\nfrom keras.layers import Conv1D, GlobalMaxPooling1D\nfrom keras import optimizers\nfrom keras.callbacks import EarlyStopping\n#%%\nclass cnn:\n\n def __init__(self,maxlen,max_voc,embedweight = None,embedding_dims = 300, batch_size = 30,\\\n filters = 1024, conv_kernel = 3,hidden_dim = 2048,epochs = 20,\\\n output_dim = 2,dropout = 0.1,trainable=False):\n\n self.epochs = epochs\n self.batch_size = batch_size\n model = Sequential()\n \n # we start off with an efficient embedding layer which maps\n # our vocab indices into embedding_dims dimensions\n if not isinstance(embedweight,type(None)):\n model.add(Embedding(max_voc,\n embedding_dims,\n input_length=maxlen,weights = [embedweight],trainable = trainable))\n else:\n model.add(Embedding(max_voc,\n embedding_dims,\n input_length=maxlen)) \n model.add(Dropout(dropout))\n \n # we add a Convolution1D, which will learn filters\n # word group filters of size filter_length:\n model.add(Conv1D(filters,\n conv_kernel,\n padding='valid',\n activation='relu',\n strides=1))\n # we use max pooling:\n model.add(GlobalMaxPooling1D())\n \n # We add a vanilla hidden layer:\n model.add(Dense(hidden_dim))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n \n model.add(Dense(512))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n \n model.add(Dense(128))\n model.add(Dropout(dropout))\n model.add(Activation('relu'))\n \n # We project onto a single unit output layer, and squash it with a sigmoid:\n model.add(Dense(output_dim))\n model.add(Activation('softmax'))\n opt = optimizers.SGD(lr=0.1,decay = 1e-4,momentum=0.9) #optimizers.adam(lr=0.01, decay=1e-6)\n model.compile(loss='binary_crossentropy',\n optimizer=opt,\n metrics=['accuracy'])\n self.model = model\n \n @staticmethod\n def padding(x,maxlen):\n return sequence.pad_sequences(x, maxlen=maxlen) \n \n def fit(self,x_train,y_train,x_valid,y_valid,class_weight = None,earlyStopping = True):\n callback_ = None\n if earlyStopping:\n callback_ = EarlyStopping(monitor='val_loss', patience=10)\n if class_weight:\n self.model.fit(x_train, y_train,\n batch_size=self.batch_size,\n epochs=self.epochs,\n validation_data=(x_valid, y_valid),class_weight = class_weight, shuffle=True,callbacks=[callback_])\n else:\n self.model.fit(x_train, y_train,\n batch_size=self.batch_size,\n epochs=self.epochs,\n validation_data=(x_valid, y_valid), shuffle=True,callbacks=[callback_]) \n# def fit(self,x_train,y_train,x_valid,y_valid,class_weight = None):\n# if class_weight:\n# self.model.fit(x_train, y_train,\n# batch_size=self.batch_size,\n# epochs=self.epochs,\n# validation_data=(x_valid, y_valid),class_weight = class_weight)\n# else:\n# self.model.fit(x_train, y_train,\n# batch_size=self.batch_size,\n# epochs=self.epochs,\n# validation_data=(x_valid, y_valid)) \n def load_weight(self,fadd):\n self.model.load_weights(fadd)\n \n def save_model(self,fpath):\n self.model.save(fpath)\n \n def predict(self,test_x):\n return self.model.predict(test_x)\n ",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
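A minimal usage sketch for the cnn wrapper in the record above, assuming the same older Keras API the record targets; the vocabulary size, sequence length and toy index sequences here are illustrative assumptions, not part of the original code:

from keras.utils import to_categorical

maxlen, max_voc = 50, 1000
x_train = cnn.padding([[1, 5, 9], [2, 7, 7, 3]], maxlen)   # pad tokenised index sequences
x_valid = cnn.padding([[4, 2]], maxlen)
y_train = to_categorical([0, 1], num_classes=2)
y_valid = to_categorical([1], num_classes=2)

model = cnn(maxlen=maxlen, max_voc=max_voc, epochs=2, batch_size=2)
model.fit(x_train, y_train, x_valid, y_valid, earlyStopping=True)
preds = model.predict(x_valid)   # softmax probabilities, shape (1, 2)

Without a pretrained embedding matrix the wrapper falls back to a randomly initialised, trainable Embedding layer, which is the branch exercised here.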
from app.exceptions import UserAlreadyExist, UserDoesNotExist
class Accounts(object):
""" Creates an Account where users can be stored """
def __init__(self):
self.users = {}
def add_user(self, user):
if user.id in self.users:
raise UserAlreadyExist
else:
self.users.update({user.id: user})
def remove_user(self, email):
"""This Method removes a user from users dictonary using his/her
unique email"""
try:
self.users.pop(email)
except KeyError:
raise UserDoesNotExist
def check_user(self, email):
if email in self.users:
return self.users[email]
def all_users(self):
return self.users
|
normal
|
{
"blob_id": "88cc4ae4137cf9c0e9c39874b36f7a2770550f96",
"index": 5431,
"step-1": "<mask token>\n\n\nclass Accounts(object):\n <mask token>\n\n def __init__(self):\n self.users = {}\n\n def add_user(self, user):\n if user.id in self.users:\n raise UserAlreadyExist\n else:\n self.users.update({user.id: user})\n <mask token>\n\n def check_user(self, email):\n if email in self.users:\n return self.users[email]\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Accounts(object):\n <mask token>\n\n def __init__(self):\n self.users = {}\n\n def add_user(self, user):\n if user.id in self.users:\n raise UserAlreadyExist\n else:\n self.users.update({user.id: user})\n <mask token>\n\n def check_user(self, email):\n if email in self.users:\n return self.users[email]\n\n def all_users(self):\n return self.users\n",
"step-3": "<mask token>\n\n\nclass Accounts(object):\n <mask token>\n\n def __init__(self):\n self.users = {}\n\n def add_user(self, user):\n if user.id in self.users:\n raise UserAlreadyExist\n else:\n self.users.update({user.id: user})\n\n def remove_user(self, email):\n \"\"\"This Method removes a user from users dictonary using his/her\n unique email\"\"\"\n try:\n self.users.pop(email)\n except KeyError:\n raise UserDoesNotExist\n\n def check_user(self, email):\n if email in self.users:\n return self.users[email]\n\n def all_users(self):\n return self.users\n",
"step-4": "<mask token>\n\n\nclass Accounts(object):\n \"\"\" Creates an Account where users can be stored \"\"\"\n\n def __init__(self):\n self.users = {}\n\n def add_user(self, user):\n if user.id in self.users:\n raise UserAlreadyExist\n else:\n self.users.update({user.id: user})\n\n def remove_user(self, email):\n \"\"\"This Method removes a user from users dictonary using his/her\n unique email\"\"\"\n try:\n self.users.pop(email)\n except KeyError:\n raise UserDoesNotExist\n\n def check_user(self, email):\n if email in self.users:\n return self.users[email]\n\n def all_users(self):\n return self.users\n",
"step-5": "from app.exceptions import UserAlreadyExist, UserDoesNotExist\n\nclass Accounts(object):\n \"\"\" Creates an Account where users can be stored \"\"\"\n\n def __init__(self):\n self.users = {}\n\n def add_user(self, user):\n if user.id in self.users:\n raise UserAlreadyExist\n else:\n self.users.update({user.id: user})\n \n def remove_user(self, email):\n \"\"\"This Method removes a user from users dictonary using his/her\n unique email\"\"\"\n try:\n self.users.pop(email)\n except KeyError:\n raise UserDoesNotExist\n\n def check_user(self, email):\n if email in self.users:\n return self.users[email]\n\n def all_users(self):\n return self.users",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
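A short usage sketch for the Accounts container above; the User type here is a hypothetical stand-in, since the record only requires objects that expose an id attribute:

class User(object):
    """Stand-in user exposing the id attribute that Accounts keys on."""
    def __init__(self, email):
        self.id = email

accounts = Accounts()
accounts.add_user(User("ada@example.com"))
print(accounts.check_user("ada@example.com"))   # returns the stored User object
accounts.remove_user("ada@example.com")         # raises UserDoesNotExist when the email is absent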
from django.urls import path
from django.conf.urls import include, url
from . import views
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
appname = 'home'
urlpatterns = [
path('', views.home, name='home'),
]
urlpatterns += staticfiles_urlpatterns()
|
normal
|
{
"blob_id": "dd23cd068eea570fc187dad2d49b30376fbd4854",
"index": 4856,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nurlpatterns += staticfiles_urlpatterns()\n",
"step-3": "<mask token>\nappname = 'home'\nurlpatterns = [path('', views.home, name='home')]\nurlpatterns += staticfiles_urlpatterns()\n",
"step-4": "from django.urls import path\nfrom django.conf.urls import include, url\nfrom . import views\nfrom django.conf.urls.static import static\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\nappname = 'home'\nurlpatterns = [path('', views.home, name='home')]\nurlpatterns += staticfiles_urlpatterns()\n",
"step-5": "from django.urls import path\nfrom django.conf.urls import include, url\nfrom . import views\nfrom django.conf.urls.static import static\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\n\nappname = 'home'\nurlpatterns = [\n path('', views.home, name='home'),\n]\nurlpatterns += staticfiles_urlpatterns()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
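The urlpatterns in the record above assume a views.home callable in the same app; a minimal sketch of such a view (the template path is a guess, not taken from the record) would be:

from django.shortcuts import render

def home(request):
    # render the app's landing page
    return render(request, 'home/home.html')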
#!/usr/bin/env python
from __future__ import print_function
from types_ import SimpleObject, SimpleObjectImmutable, NamedTuple, SimpleTuple, c_struct
import timeit
import random
TYPES = [
SimpleObjectImmutable,
SimpleObject,
NamedTuple,
SimpleTuple,
c_struct,
]
a = 1035
b = b'\x54 - fo!'
c = [1, 5, 66, ]
def measure_creation():
random.shuffle(TYPES)
for type_ in TYPES:
pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)
body = '{}(a, b, c)'.format(type_.__name__)
print('\t', type_.__name__, timeit.repeat(stmt=body, setup=pre, repeat=5))
def test_immut():
'''Verifies that the type called SimpleObjectImmutable
actually satisfies that definition.
'''
from types_ import read_only
q = SimpleObjectImmutable(a, b, c)
SimpleObjectImmutable.__setattr__ = read_only
try:
q.a = 1
assert(False)
except ValueError:
assert(True)
if __name__ == '__main__':
measure_creation()
test_immut()
|
normal
|
{
"blob_id": "ba73562cd8ffa52a1fede35c3325e7e76a6dad54",
"index": 7966,
"step-1": "<mask token>\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\nif __name__ == '__main__':\n measure_creation()\n test_immut()\n",
"step-3": "<mask token>\nTYPES = [SimpleObjectImmutable, SimpleObject, NamedTuple, SimpleTuple, c_struct\n ]\na = 1035\nb = b'T - fo!'\nc = [1, 5, 66]\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\nif __name__ == '__main__':\n measure_creation()\n test_immut()\n",
"step-4": "from __future__ import print_function\nfrom types_ import SimpleObject, SimpleObjectImmutable, NamedTuple, SimpleTuple, c_struct\nimport timeit\nimport random\nTYPES = [SimpleObjectImmutable, SimpleObject, NamedTuple, SimpleTuple, c_struct\n ]\na = 1035\nb = b'T - fo!'\nc = [1, 5, 66]\n\n\ndef measure_creation():\n random.shuffle(TYPES)\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre,\n repeat=5))\n\n\ndef test_immut():\n \"\"\"Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n \"\"\"\n from types_ import read_only\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert False\n except ValueError:\n assert True\n\n\nif __name__ == '__main__':\n measure_creation()\n test_immut()\n",
"step-5": "#!/usr/bin/env python\n\nfrom __future__ import print_function\n\nfrom types_ import SimpleObject, SimpleObjectImmutable, NamedTuple, SimpleTuple, c_struct\nimport timeit\nimport random\n\nTYPES = [\n SimpleObjectImmutable,\n SimpleObject,\n NamedTuple,\n SimpleTuple,\n c_struct,\n ]\n\na = 1035\nb = b'\\x54 - fo!'\nc = [1, 5, 66, ]\n\ndef measure_creation():\n random.shuffle(TYPES)\n\n for type_ in TYPES:\n pre = 'from __main__ import {}, a, b, c'.format(type_.__name__)\n body = '{}(a, b, c)'.format(type_.__name__)\n print('\\t', type_.__name__, timeit.repeat(stmt=body, setup=pre, repeat=5))\n\n\ndef test_immut():\n '''Verifies that the type called SimpleObjectImmutable\n actually satisfies that definition.\n '''\n from types_ import read_only\n\n q = SimpleObjectImmutable(a, b, c)\n SimpleObjectImmutable.__setattr__ = read_only\n try:\n q.a = 1\n assert(False)\n except ValueError:\n assert(True)\n\nif __name__ == '__main__':\n measure_creation()\n\n test_immut()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
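test_immut in the record above imports a read_only helper from the unseen types_ module and installs it as __setattr__; a minimal sketch consistent with how it is used there (any attribute assignment must raise ValueError) would be:

def read_only(self, name, value):
    # refuses attribute assignment once installed as a class's __setattr__
    raise ValueError("instances of %s are immutable" % type(self).__name__)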
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(' Coefficients:')
print(' Coefficient of a = ', a)
print(' Coefficient of b = ', b)
print(' Coefficient of c = ', c)
<|reserved_special_token_0|>
print('The roots of the equation:')
print(' Root 1 =', root_1)
print(' Root 2 =', root_2)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
a = float(input('Enter the coeddicient a: '))
b = float(input('Enter the coeddicient b: '))
c = float(input('Enter the coeddicient c: '))
print(' Coefficients:')
print(' Coefficient of a = ', a)
print(' Coefficient of b = ', b)
print(' Coefficient of c = ', c)
root_1 = (-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)
root_2 = (-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)
print('The roots of the equation:')
print(' Root 1 =', root_1)
print(' Root 2 =', root_2)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import math
a = float(input('Enter the coeddicient a: '))
b = float(input('Enter the coeddicient b: '))
c = float(input('Enter the coeddicient c: '))
print(' Coefficients:')
print(' Coefficient of a = ', a)
print(' Coefficient of b = ', b)
print(' Coefficient of c = ', c)
root_1 = (-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)
root_2 = (-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)
print('The roots of the equation:')
print(' Root 1 =', root_1)
print(' Root 2 =', root_2)
<|reserved_special_token_1|>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 31 14:35:49 2019
@author: devinpowers
"""
# Lab 1 in CSE 231
#Quadratic Formula
# Find the roots in the Quadratic Formula
import math
a = float(input("Enter the coeddicient a: "))
b = float(input("Enter the coeddicient b: "))
c = float(input("Enter the coeddicient c: "))
print (" Coefficients:")
print( " Coefficient of a = ", a)
print( " Coefficient of b = ", b)
print( " Coefficient of c = ", c)
root_1 = (-b+(b**2-4*a*c)**(0.5))/(2*a)
root_2 = (-b-(b**2-4*a*c)**(0.5))/(2*a)
print("The roots of the equation:")
print( " Root 1 =", root_1)
print( " Root 2 =", root_2)
|
flexible
|
{
"blob_id": "2acfd0bbad68bb9d55aeb39b180f4326a225f6d5",
"index": 1218,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(' Coefficients:')\nprint(' Coefficient of a = ', a)\nprint(' Coefficient of b = ', b)\nprint(' Coefficient of c = ', c)\n<mask token>\nprint('The roots of the equation:')\nprint(' Root 1 =', root_1)\nprint(' Root 2 =', root_2)\n",
"step-3": "<mask token>\na = float(input('Enter the coeddicient a: '))\nb = float(input('Enter the coeddicient b: '))\nc = float(input('Enter the coeddicient c: '))\nprint(' Coefficients:')\nprint(' Coefficient of a = ', a)\nprint(' Coefficient of b = ', b)\nprint(' Coefficient of c = ', c)\nroot_1 = (-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nroot_2 = (-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nprint('The roots of the equation:')\nprint(' Root 1 =', root_1)\nprint(' Root 2 =', root_2)\n",
"step-4": "<mask token>\nimport math\na = float(input('Enter the coeddicient a: '))\nb = float(input('Enter the coeddicient b: '))\nc = float(input('Enter the coeddicient c: '))\nprint(' Coefficients:')\nprint(' Coefficient of a = ', a)\nprint(' Coefficient of b = ', b)\nprint(' Coefficient of c = ', c)\nroot_1 = (-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nroot_2 = (-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)\nprint('The roots of the equation:')\nprint(' Root 1 =', root_1)\nprint(' Root 2 =', root_2)\n",
"step-5": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sat Aug 31 14:35:49 2019\n\n@author: devinpowers\n\"\"\"\n\n# Lab 1 in CSE 231\n#Quadratic Formula\n# Find the roots in the Quadratic Formula\n \nimport math\n\na = float(input(\"Enter the coeddicient a: \"))\nb = float(input(\"Enter the coeddicient b: \"))\nc = float(input(\"Enter the coeddicient c: \"))\n\nprint (\" Coefficients:\")\nprint( \" Coefficient of a = \", a)\nprint( \" Coefficient of b = \", b)\nprint( \" Coefficient of c = \", c)\n\nroot_1 = (-b+(b**2-4*a*c)**(0.5))/(2*a)\nroot_2 = (-b-(b**2-4*a*c)**(0.5))/(2*a)\n\nprint(\"The roots of the equation:\")\nprint( \" Root 1 =\", root_1)\nprint( \" Root 2 =\", root_2)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
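The lab code above imports math without using it and relies on ** 0.5 for the square root; a slightly tightened sketch of the same root computation that makes the complex-root case explicit (illustrative only, not part of the original lab) is:

import cmath

def quadratic_roots(a, b, c):
    """Return both roots of a*x**2 + b*x + c = 0, complex when the discriminant is negative."""
    disc = cmath.sqrt(b ** 2 - 4 * a * c)
    return (-b + disc) / (2 * a), (-b - disc) / (2 * a)

print(quadratic_roots(1.0, -3.0, 2.0))   # ((2+0j), (1+0j))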
<|reserved_special_token_0|>
class Paddle(Turtle):
<|reserved_special_token_0|>
def up(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos + 20, x=x_pos)
def down(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos - 20, x=x_pos)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Paddle(Turtle):
<|reserved_special_token_0|>
def up(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos + 20, x=x_pos)
def down(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos - 20, x=x_pos)
def increase_score(self):
self.score += 1
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Paddle(Turtle):
def __init__(self, x_position, y_position):
super().__init__()
self.shape('square')
self.shapesize(stretch_wid=5, stretch_len=1)
self.penup()
self.color('white')
self.goto(x=x_position, y=y_position)
self.speed('fastest')
self.score = 0
def up(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos + 20, x=x_pos)
def down(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos - 20, x=x_pos)
def increase_score(self):
self.score += 1
<|reserved_special_token_1|>
from turtle import Turtle
class Paddle(Turtle):
def __init__(self, x_position, y_position):
super().__init__()
self.shape('square')
self.shapesize(stretch_wid=5, stretch_len=1)
self.penup()
self.color('white')
self.goto(x=x_position, y=y_position)
self.speed('fastest')
self.score = 0
def up(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos + 20, x=x_pos)
def down(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos - 20, x=x_pos)
def increase_score(self):
self.score += 1
<|reserved_special_token_1|>
from turtle import Turtle
class Paddle(Turtle):
def __init__(self, x_position, y_position):
super().__init__()
self.shape('square')
self.shapesize(stretch_wid=5, stretch_len=1)
self.penup()
self.color("white")
self.goto(x=x_position, y=y_position)
self.speed("fastest")
self.score = 0
def up(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos + 20, x=x_pos)
def down(self):
y_pos = self.ycor()
x_pos = self.xcor()
self.goto(y=y_pos - 20, x=x_pos)
def increase_score(self):
self.score += 1
|
flexible
|
{
"blob_id": "f49b80d0b8b42bafc787a36d0a8be98ab7fa53e7",
"index": 3558,
"step-1": "<mask token>\n\n\nclass Paddle(Turtle):\n <mask token>\n\n def up(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos + 20, x=x_pos)\n\n def down(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos - 20, x=x_pos)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Paddle(Turtle):\n <mask token>\n\n def up(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos + 20, x=x_pos)\n\n def down(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos - 20, x=x_pos)\n\n def increase_score(self):\n self.score += 1\n",
"step-3": "<mask token>\n\n\nclass Paddle(Turtle):\n\n def __init__(self, x_position, y_position):\n super().__init__()\n self.shape('square')\n self.shapesize(stretch_wid=5, stretch_len=1)\n self.penup()\n self.color('white')\n self.goto(x=x_position, y=y_position)\n self.speed('fastest')\n self.score = 0\n\n def up(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos + 20, x=x_pos)\n\n def down(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos - 20, x=x_pos)\n\n def increase_score(self):\n self.score += 1\n",
"step-4": "from turtle import Turtle\n\n\nclass Paddle(Turtle):\n\n def __init__(self, x_position, y_position):\n super().__init__()\n self.shape('square')\n self.shapesize(stretch_wid=5, stretch_len=1)\n self.penup()\n self.color('white')\n self.goto(x=x_position, y=y_position)\n self.speed('fastest')\n self.score = 0\n\n def up(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos + 20, x=x_pos)\n\n def down(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos - 20, x=x_pos)\n\n def increase_score(self):\n self.score += 1\n",
"step-5": "from turtle import Turtle\n\n\nclass Paddle(Turtle):\n def __init__(self, x_position, y_position):\n super().__init__()\n self.shape('square')\n self.shapesize(stretch_wid=5, stretch_len=1)\n self.penup()\n self.color(\"white\")\n self.goto(x=x_position, y=y_position)\n self.speed(\"fastest\")\n self.score = 0\n\n def up(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos + 20, x=x_pos)\n\n def down(self):\n y_pos = self.ycor()\n x_pos = self.xcor()\n self.goto(y=y_pos - 20, x=x_pos)\n\n def increase_score(self):\n self.score += 1\n\n\n\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
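A minimal sketch of wiring the Paddle above into a turtle Screen with key bindings, in the style of the usual Pong exercise; the window size and key names are assumptions, not part of the record:

from turtle import Screen

screen = Screen()
screen.setup(width=800, height=600)
screen.bgcolor("black")
screen.listen()

right_paddle = Paddle(x_position=350, y_position=0)
screen.onkey(right_paddle.up, "Up")      # move the paddle on the arrow keys
screen.onkey(right_paddle.down, "Down")

screen.exitonclick()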
<|reserved_special_token_0|>
class FastCountQuerySet:
def __init__(self, queryset, tablename):
self.queryset = queryset
self.tablename = tablename
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',
[self.tablename])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, item):
return self.queryset[item]
class MetadataCountQuerySet:
def __init__(self, queryset, propertyname):
self.queryset = queryset
self.propertyname = propertyname
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',
[self.propertyname])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, key):
return self.queryset[key]
def index(request):
metadata = {}
expired_certs = 0
active_certs = 0
total_certs = 0
total_cas = 0
messages = []
if 'subok' in request.GET:
messages.append({'class': 'alert-info', 'text':
'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'
})
if 'unsubok' in request.GET:
messages.append({'class': 'alert-info', 'text':
'<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'
})
subscribeform = SubscribeUnsubscribeForm()
with connection.cursor() as c:
c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')
rows = c.fetchall()
for row in rows:
metadata[row[0]] = row[1]
return render(request, 'observer/index.html', {'total_certs': metadata[
'number_of_certs'], 'total_ca': metadata['number_of_cas'],
'total_logs': CtLog.objects.count(), 'active_certs': metadata[
'number_of_active_certs'], 'expired_certs': metadata[
'number_of_expired_certs'], 'revoked_certs': metadata[
'number_of_revoked_certs'], 'misissued_certs': metadata[
'number_of_misissued_certs'], 'behaving_cas': metadata[
'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[
'number_of_interesting_cas'], 'biggest_log': metadata[
'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.
objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':
metadata['number_of_certs_in_smallest_log'], 'uptime_days': (
timezone.now().date() - datetime.date(2015, 10, 14)).days,
'messages': messages, 'subscribeform': subscribeform})
def search(request):
term = request.GET.get('term', '')
return render(request, 'observer/search.html', {'term': term})
def caall(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
page = int(page)
list_of_certs = []
filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(
'common_name'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/cas.html', {'list_of_ca':
list_of_certs, 'filter': filtered_qs})
def certall(request, page=None, ae=None, issuer_ca=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
ae = request.GET.get('algorithm')
issuer_ca = request.GET.get('issuer_ca')
date_notbefore = request.GET.get('date_notbefore')
date_notbefore_gte = request.GET.get('date_notbefore_gte')
is_active = request.GET.get('is_active')
date_notafter = request.GET.get('date_notafter')
date_notafter_lte = request.GET.get('date_notafter_lte')
page = int(page)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(
Certificate.objects.all().order_by('-id'), 'certificate'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),
'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '1' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_before__lte=timezone.now(), not_after__gte=timezone.now
()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_before__lte
=timezone.now(), not_after__gte=timezone.now()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '0' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_after__lte=datetime.date.today()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_after__lte=
datetime.date.today()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
<|reserved_special_token_0|>
def certexpired(request, page=None, order=None):
if page == None:
return HttpResponsePermanentRedirect('expired/1')
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(
not_after__lt=timezone.now()), 'number_of_expired_certs'),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certrevoked(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('revoked/1')
page = int(page)
list_of_certs = []
paginator = Paginator(Certificate.objects.filter(id__in=
RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE
)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certs_by_log(request, log_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('./1')
page = int(page)
log_id = int(log_id)
list_of_certs = []
paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_entries = paginator.page(page)
return render(request, 'observer/log_certs.html', {'log':
get_object_or_404(CtLog, pk=log_id), 'list_of_entries':
list_of_entries})
def certs_by_ca(request, ca_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('certificates/1')
page = int(page)
ca_id = int(ca_id)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.
filter(issuer_ca=ca_id))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
<|reserved_special_token_0|>
def list_dnsname_certs(request, dnsname):
field_id = 'dnsname'
expression = dnsname
list_of_certs = Certificate.objects.raw(
"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC"
, [dnsname])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html', {'field_id': field_id,
'expression': expression, 'list_of_certs': list_of_certs, 'issues':
issues})
def log(request):
return render(request, 'observer/logs.html', {'list_of_logs': CtLog.
objects.all().order_by('-is_active', '-latest_entry_id', 'name')})
def cadetail(request, ca_id):
ca = get_object_or_404(Ca, pk=ca_id)
number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()
return render(request, 'observer/cadetail.html', {'ca': ca,
'number_of_issued_ca': number_of_issued_ca})
def certdetail(request, cert_id=None, cert_sha256=None):
if cert_sha256:
cert_sha256_bin = cert_sha256.decode('hex')
cert = get_object_or_404(Certificate, certificate__sha256=
cert_sha256_bin)
if cert_id:
cert = get_object_or_404(Certificate, pk=cert_id)
cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()
digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1
]
return render(request, 'observer/certdetail.html', {'certificate': cert,
'ca_certificate': cacert, 'keysize_distribution': 'TODO',
'digest_sha256': digest_sha256})
<|reserved_special_token_0|>
def logdetail(request, log_id):
log = get_object_or_404(CtLog, pk=log_id)
number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()
return render(request, 'observer/logdetail.html', {'log': log,
'number_of_issued_ca': number_of_issued_ca})
<|reserved_special_token_0|>
def issues(request):
return render(request, 'observer/issues.html')
def status(request):
status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':
'ok'}
try:
with open('/static/data/status.json', 'r') as f:
status = json.load(f)
status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status
['analyzer']['lastrun'])
status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status
['monitor']['lastrun'])
except Exception as e:
status['msg'] = 'Could not load status file.' + str(e)
return render(request, 'observer/status.html', {'status': status})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class FastCountQuerySet:
def __init__(self, queryset, tablename):
self.queryset = queryset
self.tablename = tablename
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',
[self.tablename])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, item):
return self.queryset[item]
class MetadataCountQuerySet:
def __init__(self, queryset, propertyname):
self.queryset = queryset
self.propertyname = propertyname
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',
[self.propertyname])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, key):
return self.queryset[key]
def index(request):
metadata = {}
expired_certs = 0
active_certs = 0
total_certs = 0
total_cas = 0
messages = []
if 'subok' in request.GET:
messages.append({'class': 'alert-info', 'text':
'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'
})
if 'unsubok' in request.GET:
messages.append({'class': 'alert-info', 'text':
'<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'
})
subscribeform = SubscribeUnsubscribeForm()
with connection.cursor() as c:
c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')
rows = c.fetchall()
for row in rows:
metadata[row[0]] = row[1]
return render(request, 'observer/index.html', {'total_certs': metadata[
'number_of_certs'], 'total_ca': metadata['number_of_cas'],
'total_logs': CtLog.objects.count(), 'active_certs': metadata[
'number_of_active_certs'], 'expired_certs': metadata[
'number_of_expired_certs'], 'revoked_certs': metadata[
'number_of_revoked_certs'], 'misissued_certs': metadata[
'number_of_misissued_certs'], 'behaving_cas': metadata[
'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[
'number_of_interesting_cas'], 'biggest_log': metadata[
'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.
objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':
metadata['number_of_certs_in_smallest_log'], 'uptime_days': (
timezone.now().date() - datetime.date(2015, 10, 14)).days,
'messages': messages, 'subscribeform': subscribeform})
def search(request):
term = request.GET.get('term', '')
return render(request, 'observer/search.html', {'term': term})
def caall(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
page = int(page)
list_of_certs = []
filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(
'common_name'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/cas.html', {'list_of_ca':
list_of_certs, 'filter': filtered_qs})
def certall(request, page=None, ae=None, issuer_ca=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
ae = request.GET.get('algorithm')
issuer_ca = request.GET.get('issuer_ca')
date_notbefore = request.GET.get('date_notbefore')
date_notbefore_gte = request.GET.get('date_notbefore_gte')
is_active = request.GET.get('is_active')
date_notafter = request.GET.get('date_notafter')
date_notafter_lte = request.GET.get('date_notafter_lte')
page = int(page)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(
Certificate.objects.all().order_by('-id'), 'certificate'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),
'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '1' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_before__lte=timezone.now(), not_after__gte=timezone.now
()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_before__lte
=timezone.now(), not_after__gte=timezone.now()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '0' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_after__lte=datetime.date.today()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_after__lte=
datetime.date.today()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
<|reserved_special_token_0|>
def certexpired(request, page=None, order=None):
if page == None:
return HttpResponsePermanentRedirect('expired/1')
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(
not_after__lt=timezone.now()), 'number_of_expired_certs'),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certrevoked(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('revoked/1')
page = int(page)
list_of_certs = []
paginator = Paginator(Certificate.objects.filter(id__in=
RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE
)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certs_by_log(request, log_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('./1')
page = int(page)
log_id = int(log_id)
    list_of_entries = []
paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_entries = paginator.page(page)
return render(request, 'observer/log_certs.html', {'log':
get_object_or_404(CtLog, pk=log_id), 'list_of_entries':
list_of_entries})
def certs_by_ca(request, ca_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('certificates/1')
page = int(page)
ca_id = int(ca_id)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.
filter(issuer_ca=ca_id))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
def list_cn_certs(request, cn):
field_id = 'common name'
expression = cn
list_of_certs = Certificate.objects.raw(
"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC"
, [cn])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html', {'field_id': field_id,
'expression': expression, 'list_of_certs': list_of_certs, 'issues':
issues})
def list_dnsname_certs(request, dnsname):
field_id = 'dnsname'
expression = dnsname
list_of_certs = Certificate.objects.raw(
"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC"
, [dnsname])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html', {'field_id': field_id,
'expression': expression, 'list_of_certs': list_of_certs, 'issues':
issues})
def log(request):
return render(request, 'observer/logs.html', {'list_of_logs': CtLog.
objects.all().order_by('-is_active', '-latest_entry_id', 'name')})
def cadetail(request, ca_id):
ca = get_object_or_404(Ca, pk=ca_id)
number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()
return render(request, 'observer/cadetail.html', {'ca': ca,
'number_of_issued_ca': number_of_issued_ca})
def certdetail(request, cert_id=None, cert_sha256=None):
if cert_sha256:
        cert_sha256_bin = bytes.fromhex(cert_sha256)
cert = get_object_or_404(Certificate, certificate__sha256=
cert_sha256_bin)
if cert_id:
cert = get_object_or_404(Certificate, pk=cert_id)
cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()
digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1
]
return render(request, 'observer/certdetail.html', {'certificate': cert,
'ca_certificate': cacert, 'keysize_distribution': 'TODO',
'digest_sha256': digest_sha256})
<|reserved_special_token_0|>
def logdetail(request, log_id):
log = get_object_or_404(CtLog, pk=log_id)
number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()
return render(request, 'observer/logdetail.html', {'log': log,
'number_of_issued_ca': number_of_issued_ca})
<|reserved_special_token_0|>
def issues(request):
return render(request, 'observer/issues.html')
def status(request):
status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':
'ok'}
try:
with open('/static/data/status.json', 'r') as f:
status = json.load(f)
status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status
['analyzer']['lastrun'])
status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status
['monitor']['lastrun'])
except Exception as e:
status['msg'] = 'Could not load status file.' + str(e)
return render(request, 'observer/status.html', {'status': status})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
class FastCountQuerySet:
def __init__(self, queryset, tablename):
self.queryset = queryset
self.tablename = tablename
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',
[self.tablename])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, item):
return self.queryset[item]
class MetadataCountQuerySet:
def __init__(self, queryset, propertyname):
self.queryset = queryset
self.propertyname = propertyname
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',
[self.propertyname])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, key):
return self.queryset[key]
def index(request):
metadata = {}
expired_certs = 0
active_certs = 0
total_certs = 0
total_cas = 0
messages = []
if 'subok' in request.GET:
messages.append({'class': 'alert-info', 'text':
'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'
})
if 'unsubok' in request.GET:
messages.append({'class': 'alert-info', 'text':
            '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'
})
subscribeform = SubscribeUnsubscribeForm()
with connection.cursor() as c:
c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')
rows = c.fetchall()
for row in rows:
metadata[row[0]] = row[1]
return render(request, 'observer/index.html', {'total_certs': metadata[
'number_of_certs'], 'total_ca': metadata['number_of_cas'],
'total_logs': CtLog.objects.count(), 'active_certs': metadata[
'number_of_active_certs'], 'expired_certs': metadata[
'number_of_expired_certs'], 'revoked_certs': metadata[
'number_of_revoked_certs'], 'misissued_certs': metadata[
'number_of_misissued_certs'], 'behaving_cas': metadata[
'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[
'number_of_interesting_cas'], 'biggest_log': metadata[
'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.
objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':
metadata['number_of_certs_in_smallest_log'], 'uptime_days': (
timezone.now().date() - datetime.date(2015, 10, 14)).days,
'messages': messages, 'subscribeform': subscribeform})
def search(request):
term = request.GET.get('term', '')
return render(request, 'observer/search.html', {'term': term})
def caall(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
page = int(page)
list_of_certs = []
filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(
'common_name'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/cas.html', {'list_of_ca':
list_of_certs, 'filter': filtered_qs})
def certall(request, page=None, ae=None, issuer_ca=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
ae = request.GET.get('algorithm')
issuer_ca = request.GET.get('issuer_ca')
date_notbefore = request.GET.get('date_notbefore')
date_notbefore_gte = request.GET.get('date_notbefore_gte')
is_active = request.GET.get('is_active')
date_notafter = request.GET.get('date_notafter')
date_notafter_lte = request.GET.get('date_notafter_lte')
page = int(page)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(
Certificate.objects.all().order_by('-id'), 'certificate'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),
'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '1' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_before__lte=timezone.now(), not_after__gte=timezone.now
()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_before__lte
=timezone.now(), not_after__gte=timezone.now()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '0' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_after__lte=datetime.date.today()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_after__lte=
datetime.date.today()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
<|reserved_special_token_0|>
def certexpired(request, page=None, order=None):
if page == None:
return HttpResponsePermanentRedirect('expired/1')
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(
not_after__lt=timezone.now()), 'number_of_expired_certs'),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certrevoked(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('revoked/1')
page = int(page)
list_of_certs = []
paginator = Paginator(Certificate.objects.filter(id__in=
RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE
)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certs_by_log(request, log_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('./1')
page = int(page)
log_id = int(log_id)
    list_of_entries = []
paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_entries = paginator.page(page)
return render(request, 'observer/log_certs.html', {'log':
get_object_or_404(CtLog, pk=log_id), 'list_of_entries':
list_of_entries})
def certs_by_ca(request, ca_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('certificates/1')
page = int(page)
ca_id = int(ca_id)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.
filter(issuer_ca=ca_id))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
def list_cn_certs(request, cn):
field_id = 'common name'
expression = cn
list_of_certs = Certificate.objects.raw(
"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC"
, [cn])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html', {'field_id': field_id,
'expression': expression, 'list_of_certs': list_of_certs, 'issues':
issues})
def list_dnsname_certs(request, dnsname):
field_id = 'dnsname'
expression = dnsname
list_of_certs = Certificate.objects.raw(
"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC"
, [dnsname])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html', {'field_id': field_id,
'expression': expression, 'list_of_certs': list_of_certs, 'issues':
issues})
def log(request):
return render(request, 'observer/logs.html', {'list_of_logs': CtLog.
objects.all().order_by('-is_active', '-latest_entry_id', 'name')})
def cadetail(request, ca_id):
ca = get_object_or_404(Ca, pk=ca_id)
number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()
return render(request, 'observer/cadetail.html', {'ca': ca,
'number_of_issued_ca': number_of_issued_ca})
def certdetail(request, cert_id=None, cert_sha256=None):
if cert_sha256:
        cert_sha256_bin = bytes.fromhex(cert_sha256)
cert = get_object_or_404(Certificate, certificate__sha256=
cert_sha256_bin)
if cert_id:
cert = get_object_or_404(Certificate, pk=cert_id)
cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()
digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1
]
return render(request, 'observer/certdetail.html', {'certificate': cert,
'ca_certificate': cacert, 'keysize_distribution': 'TODO',
'digest_sha256': digest_sha256})
<|reserved_special_token_0|>
def logdetail(request, log_id):
log = get_object_or_404(CtLog, pk=log_id)
number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()
return render(request, 'observer/logdetail.html', {'log': log,
'number_of_issued_ca': number_of_issued_ca})
def flag(request, flag_id):
try:
with open(os.path.join(BASE_DIR, 'static/flags/png/{0}.png'.format(
flag_id.lower())), 'rb') as f:
return HttpResponse(f.read(), content_type='image/png')
except IOError:
with open(os.path.join(BASE_DIR, 'static/flags/png/-.png'), 'rb') as f:
return HttpResponse(f.read(), content_type='image/png')
<|reserved_special_token_0|>
def issues(request):
return render(request, 'observer/issues.html')
def status(request):
status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':
'ok'}
try:
with open('/static/data/status.json', 'r') as f:
status = json.load(f)
status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status
['analyzer']['lastrun'])
status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status
['monitor']['lastrun'])
except Exception as e:
status['msg'] = 'Could not load status file.' + str(e)
return render(request, 'observer/status.html', {'status': status})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
class FastCountQuerySet:
def __init__(self, queryset, tablename):
self.queryset = queryset
self.tablename = tablename
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',
[self.tablename])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, item):
return self.queryset[item]
class MetadataCountQuerySet:
def __init__(self, queryset, propertyname):
self.queryset = queryset
self.propertyname = propertyname
def count(self):
cursor = connection.cursor()
cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',
[self.propertyname])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, key):
return self.queryset[key]
def index(request):
metadata = {}
expired_certs = 0
active_certs = 0
total_certs = 0
total_cas = 0
messages = []
if 'subok' in request.GET:
messages.append({'class': 'alert-info', 'text':
'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'
})
if 'unsubok' in request.GET:
messages.append({'class': 'alert-info', 'text':
            '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'
})
subscribeform = SubscribeUnsubscribeForm()
with connection.cursor() as c:
c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')
rows = c.fetchall()
for row in rows:
metadata[row[0]] = row[1]
return render(request, 'observer/index.html', {'total_certs': metadata[
'number_of_certs'], 'total_ca': metadata['number_of_cas'],
'total_logs': CtLog.objects.count(), 'active_certs': metadata[
'number_of_active_certs'], 'expired_certs': metadata[
'number_of_expired_certs'], 'revoked_certs': metadata[
'number_of_revoked_certs'], 'misissued_certs': metadata[
'number_of_misissued_certs'], 'behaving_cas': metadata[
'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[
'number_of_interesting_cas'], 'biggest_log': metadata[
'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.
objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':
metadata['number_of_certs_in_smallest_log'], 'uptime_days': (
timezone.now().date() - datetime.date(2015, 10, 14)).days,
'messages': messages, 'subscribeform': subscribeform})
def search(request):
term = request.GET.get('term', '')
return render(request, 'observer/search.html', {'term': term})
def caall(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
page = int(page)
list_of_certs = []
filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(
'common_name'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/cas.html', {'list_of_ca':
list_of_certs, 'filter': filtered_qs})
def certall(request, page=None, ae=None, issuer_ca=None):
if page == None:
return HttpResponsePermanentRedirect('all/1')
ae = request.GET.get('algorithm')
issuer_ca = request.GET.get('issuer_ca')
date_notbefore = request.GET.get('date_notbefore')
date_notbefore_gte = request.GET.get('date_notbefore_gte')
is_active = request.GET.get('is_active')
date_notafter = request.GET.get('date_notafter')
date_notafter_lte = request.GET.get('date_notafter_lte')
page = int(page)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(
Certificate.objects.all().order_by('-id'), 'certificate'))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),
'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '1' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_before__lte=timezone.now(), not_after__gte=timezone.now
()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_before__lte
=timezone.now(), not_after__gte=timezone.now()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active == '0' or is_active == '' or is_active == None:
if issuer_ca != None and (is_active == None or is_active == ''):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and (issuer_ca == None or issuer_ca == ''):
query = FastCountQuerySet(Certificate.objects.filter(
not_after__lte=datetime.date.today()), 'certificate')
if issuer_ca == '' and is_active == '':
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if is_active != None and issuer_ca != None:
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains=issuer_ca, not_after__lte=
datetime.date.today()), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
<|reserved_special_token_0|>
def certexpired(request, page=None, order=None):
if page == None:
return HttpResponsePermanentRedirect('expired/1')
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(
not_after__lt=timezone.now()), 'number_of_expired_certs'),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certrevoked(request, page=None):
if page == None:
return HttpResponsePermanentRedirect('revoked/1')
page = int(page)
list_of_certs = []
paginator = Paginator(Certificate.objects.filter(id__in=
RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE
)
if page in paginator.page_range:
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs})
def certs_by_log(request, log_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('./1')
page = int(page)
log_id = int(log_id)
    list_of_entries = []
paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),
ITEMS_PER_PAGE)
if page in paginator.page_range:
list_of_entries = paginator.page(page)
return render(request, 'observer/log_certs.html', {'log':
get_object_or_404(CtLog, pk=log_id), 'list_of_entries':
list_of_entries})
def certs_by_ca(request, ca_id, page=None):
if page == None:
return HttpResponsePermanentRedirect('certificates/1')
page = int(page)
ca_id = int(ca_id)
list_of_certs = []
filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.
filter(issuer_ca=ca_id))
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html', {'list_of_certs':
list_of_certs, 'filter': filtered_qs})
def list_cn_certs(request, cn):
field_id = 'common name'
expression = cn
list_of_certs = Certificate.objects.raw(
"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC"
, [cn])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html', {'field_id': field_id,
'expression': expression, 'list_of_certs': list_of_certs, 'issues':
issues})
def list_dnsname_certs(request, dnsname):
field_id = 'dnsname'
expression = dnsname
list_of_certs = Certificate.objects.raw(
"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC"
, [dnsname])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html', {'field_id': field_id,
'expression': expression, 'list_of_certs': list_of_certs, 'issues':
issues})
def log(request):
return render(request, 'observer/logs.html', {'list_of_logs': CtLog.
objects.all().order_by('-is_active', '-latest_entry_id', 'name')})
def cadetail(request, ca_id):
ca = get_object_or_404(Ca, pk=ca_id)
number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()
return render(request, 'observer/cadetail.html', {'ca': ca,
'number_of_issued_ca': number_of_issued_ca})
def certdetail(request, cert_id=None, cert_sha256=None):
if cert_sha256:
        cert_sha256_bin = bytes.fromhex(cert_sha256)
cert = get_object_or_404(Certificate, certificate__sha256=
cert_sha256_bin)
if cert_id:
cert = get_object_or_404(Certificate, pk=cert_id)
cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()
digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1
]
return render(request, 'observer/certdetail.html', {'certificate': cert,
'ca_certificate': cacert, 'keysize_distribution': 'TODO',
'digest_sha256': digest_sha256})
<|reserved_special_token_0|>
def logdetail(request, log_id):
log = get_object_or_404(CtLog, pk=log_id)
number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()
return render(request, 'observer/logdetail.html', {'log': log,
'number_of_issued_ca': number_of_issued_ca})
def flag(request, flag_id):
try:
with open(os.path.join(BASE_DIR, 'static/flags/png/{0}.png'.format(
flag_id.lower())), 'rb') as f:
return HttpResponse(f.read(), content_type='image/png')
except IOError:
with open(os.path.join(BASE_DIR, 'static/flags/png/-.png'), 'rb') as f:
return HttpResponse(f.read(), content_type='image/png')
def imprint(request):
return render(request, 'observer/imprint.html')
def issues(request):
return render(request, 'observer/issues.html')
def status(request):
status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':
'ok'}
try:
with open('/static/data/status.json', 'r') as f:
status = json.load(f)
status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status
['analyzer']['lastrun'])
status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status
['monitor']['lastrun'])
except Exception as e:
status['msg'] = 'Could not load status file.' + str(e)
return render(request, 'observer/status.html', {'status': status})
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from django.db.models import Count
from django.db.models import QuerySet
from django.db import connection
from django.core.paginator import Paginator, PageNotAnInteger
from django.http import HttpResponse
from django.http import HttpResponsePermanentRedirect
import datetime
import os
import json
from ctobservatory.settings import BASE_DIR
from .models import *
from notification.forms import SubscribeUnsubscribeForm
#from .issuefinder import *
import observer.issuefinder as issuefinder
from django.template.defaulttags import register
import hashlib
import psycopg2
ITEMS_PER_PAGE = 50
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
class FastCountQuerySet():
def __init__(self, queryset, tablename):
self.queryset = queryset
self.tablename = tablename
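	# Note: pg_class.reltuples is PostgreSQL's planner estimate of the row count,
	# so this avoids a full COUNT(*) scan over the huge certificate table at the
	# cost of exactness (the value is only as fresh as the last ANALYZE/VACUUM).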
def count(self):
cursor = connection.cursor()
cursor.execute("SELECT reltuples FROM pg_class WHERE relname = %s", [self.tablename])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
# passthrough all the other methods
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, item):
return self.queryset[item]
class MetadataCountQuerySet():
def __init__(self, queryset, propertyname):
self.queryset = queryset
self.propertyname = propertyname
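	# Note: instead of counting rows, this looks up a precomputed statistic in the
	# metadata table (presumably maintained by the background analyzer job).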
def count(self):
cursor = connection.cursor()
cursor.execute("SELECT name_value FROM metadata WHERE name_type = %s", [self.propertyname])
row = cursor.fetchone()
count = int(row[0])
cursor.close()
return count
# passthrough all the other methods
def __getattr__(self, attr):
try:
return object.__getattr__(self, attr)
except AttributeError:
return getattr(self.queryset, attr)
def __getitem__(self, key):
return self.queryset[key]
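# Both wrappers are drop-in replacements for a QuerySet inside Django's Paginator:
# only count() is overridden, while slicing and attribute access fall through to
# the wrapped queryset (see the views below for how they are used).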
def index(request):
metadata = {}
expired_certs = 0
active_certs = 0
total_certs = 0
total_cas = 0
messages = []
if('subok' in request.GET):
messages.append({'class':'alert-info','text':'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'})
if('unsubok' in request.GET):
		messages.append({'class':'alert-info','text':'<strong>Unsubscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'})
subscribeform = SubscribeUnsubscribeForm()
with connection.cursor() as c:
c.execute("SELECT NAME_TYPE, NAME_VALUE FROM metadata")
rows = c.fetchall()
for row in rows:
metadata[row[0]] = row[1]
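	# metadata now maps statistic names (e.g. 'number_of_certs') to their
	# precomputed values, so the dashboard below avoids expensive COUNT queries.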
return render(request, 'observer/index.html',
{
'total_certs': metadata['number_of_certs'],
'total_ca': metadata['number_of_cas'],
'total_logs': CtLog.objects.count(),
'active_certs': metadata['number_of_active_certs'],
'expired_certs': metadata['number_of_expired_certs'],
'revoked_certs': metadata['number_of_revoked_certs'],
'misissued_certs': metadata['number_of_misissued_certs'],
'behaving_cas' : metadata['number_of_correctly_behaving_cas'],
'interesting_cas' : metadata['number_of_interesting_cas'],
'biggest_log' : metadata['number_of_certs_in_biggest_log'],
'biggest_log_name' : CtLog.objects.get(id=metadata['biggest_log_id']).name,
'smallest_log' : metadata['number_of_certs_in_smallest_log'],
'uptime_days': (timezone.now().date()-datetime.date(2015,10,14)).days, #TODO
'messages' : messages,
'subscribeform' : subscribeform
}
)
def search(request):
term = request.GET.get("term","")
#found_ca = Ca.objects.filter(name__icontains=term)
#found_cn_dnsname = Certificate.objects.raw("SELECT DISTINCT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, x509_notBefore(CERTIFICATE) FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE (NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s))) OR (NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s)))
#ORDER BY x509_notBefore(CERTIFICATE) DESC", [term, term])
return render(request, 'observer/search.html',
{
'term' : term
#'found_ca' : found_ca,
#'found_cn_dnsname' : found_cn_dnsname
}
)
def caall(request, page=None): #VIEW FOR CAs
if(page==None):
return HttpResponsePermanentRedirect("all/1")
page = int(page)
list_of_certs = []
filtered_qs = CaFilter(
request.GET,
queryset=Ca.objects.all().order_by('common_name')
)
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/cas.html',
{
'list_of_ca': list_of_certs,
'filter': filtered_qs#Ca.objects.annotate(num_certs=Count('certificate')).order_by('-num_certs'),
}
)
def certall(request, page=None, ae=None, issuer_ca=None): #VIEW FOR Certificates->ALL
if(page==None):
return HttpResponsePermanentRedirect("all/1")
ae = request.GET.get("algorithm")
issuer_ca = request.GET.get("issuer_ca")
date_notbefore = request.GET.get("date_notbefore")
date_notbefore_gte = request.GET.get("date_notbefore_gte")
is_active = request.GET.get("is_active")
date_notafter = request.GET.get("date_notafter")
date_notafter_lte = request.GET.get("date_notafter_lte")
page = int(page)
list_of_certs = []
filtered_qs = CertFilter(
request.GET,
queryset=MetadataCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')
)
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
#Alternative filter solution for better performance
#https://localhost/cert/all/1?issuer_ca=merge&date_notbefore=&date_notbefore_gte=&is_active=&date_notafter=&date_notafter_lte=
query = FastCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active == "1" or is_active == "" or is_active == None):
if(issuer_ca != None and (is_active == None or is_active == "")):
query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and (issuer_ca == None or issuer_ca == "")):
query = FastCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'certificate')
if(issuer_ca == "" and is_active == ""):
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and issuer_ca != None ):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains = issuer_ca,
not_before__lte=timezone.now(), not_after__gte=timezone.now(), ), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active == "0" or is_active == "" or is_active == None):
if(issuer_ca != None and (is_active == None or is_active == "")):
query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and (issuer_ca == None or issuer_ca == "")):
query = FastCountQuerySet(Certificate.objects.filter(not_after__lte=datetime.date.today()), 'certificate')
if(issuer_ca == "" and is_active == ""):
query = FastCountQuerySet(Certificate.objects.all(), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
if(is_active != None and issuer_ca != None ):
query = FastCountQuerySet(Certificate.objects.filter(
issuer_ca__common_name__contains = issuer_ca,
not_after__lte=datetime.date.today() ), 'certificate')
paginator = Paginator(query, ITEMS_PER_PAGE)
####################################################
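	# Note: the paginator built from filtered_qs above is replaced by one built from
	# the hand-rolled queries; filtered_qs is still passed to the template, but only
	# to render the filter form.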
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
#if(ae != None):
#list_of_certs = Certificate.objects.raw("SELECT * FROM certificate WHERE SIGNATURE_ALGORITHM=%s", [ae])
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs,
'filter': filtered_qs
}
)
def certactive(request, page=None):
if(page==None):
return HttpResponsePermanentRedirect("active/1")
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'number_of_active_certs'), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs
}
)
def certexpired(request, page=None, order=None):
if(page==None):
return HttpResponsePermanentRedirect("expired/1")
page = int(page)
list_of_certs = []
paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_after__lt=timezone.now()), 'number_of_expired_certs'), ITEMS_PER_PAGE)
# paginator = Paginator(Certificate.objects.filter(not_after__lt=timezone.now()), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs
}
)
def certrevoked(request, page=None):
if(page==None):
return HttpResponsePermanentRedirect("revoked/1")
page = int(page)
list_of_certs = []
paginator = Paginator(Certificate.objects.filter(id__in=RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_certs = paginator.page(page)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs
}
)
def certs_by_log(request, log_id, page=None):
if(page==None):
return HttpResponsePermanentRedirect("./1")
page = int(page)
log_id = int(log_id)
	list_of_entries = []
paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id), ITEMS_PER_PAGE)
if(page in paginator.page_range):
list_of_entries = paginator.page(page)
return render(request, 'observer/log_certs.html',
{
'log': get_object_or_404(CtLog, pk=log_id),
'list_of_entries' : list_of_entries
}
)
def certs_by_ca(request, ca_id, page=None):
if(page==None):
return HttpResponsePermanentRedirect("certificates/1")
page = int(page)
ca_id = int(ca_id)
list_of_certs = []
filtered_qs = CertFilter(
request.GET,
queryset=Certificate.objects.filter(issuer_ca=ca_id)
)
paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)
page = request.GET.get('page')
try:
list_of_certs = paginator.page(page)
except PageNotAnInteger:
list_of_certs = paginator.page(1)
return render(request, 'observer/certs.html',
{
'list_of_certs': list_of_certs,
'filter': filtered_qs
})
# paginator = Paginator(Certificate.objects.filter(issuer_ca=ca_id), ITEMS_PER_PAGE)
# if(page in paginator.page_range):
# list_of_certs = paginator.page(page)
# return render(request, 'observer/certs.html',
# {
# 'list_of_certs': list_of_certs
# }
# )
def list_cn_certs(request, cn):
field_id = 'common name'
expression = cn
list_of_certs = Certificate.objects.raw("SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC", [cn])
#list_of_certs = Certificate.objects.filter(certificate__common_name=cn).order_by('not_before')
issues = issuefinder.get_all_issues(list(list_of_certs))
#issues = issuefinder.get_first_certificates(list_of_certs)
return render(request, 'observer/history.html',
{
'field_id': field_id,
'expression': expression,
'list_of_certs': list_of_certs,
'issues':issues
}
)
def list_dnsname_certs(request, dnsname):
field_id = 'dnsname'
expression = dnsname
list_of_certs = Certificate.objects.raw("SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC", [dnsname])
issues = issuefinder.get_all_issues(list(list_of_certs))
return render(request, 'observer/history.html',
{
'field_id': field_id,
'expression': expression,
'list_of_certs': list_of_certs,
'issues':issues
}
)
def log(request): #LOG VIEW
return render(request, 'observer/logs.html',
{
#'list_of_logs': CtLog.objects.all().annotate(entries=Count('ctlogentry')).order_by('latest_entry_id')
'list_of_logs': CtLog.objects.all().order_by('-is_active','-latest_entry_id','name')
}
)
def cadetail(request,ca_id):
ca = get_object_or_404(Ca, pk=ca_id)
#counting number of issued CA's:
number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()
return render(request, 'observer/cadetail.html', { 'ca' : ca, 'number_of_issued_ca': number_of_issued_ca})
def certdetail(request,cert_id=None,cert_sha256=None):
if cert_sha256:
		cert_sha256_bin = bytes.fromhex(cert_sha256) # Python 3: bytes.fromhex replaces the removed str.decode('hex')
cert = get_object_or_404(Certificate, certificate__sha256=cert_sha256_bin)
if cert_id:
cert = get_object_or_404(Certificate, pk=cert_id)
cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()
digest_sha256 = str(cert.get_digest_sha256()).replace(':','').lower()[2:-1]
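	# str() on a bytes value yields "b'...'", so the [2:-1] slice strips that wrapper
	# (assuming get_digest_sha256() returns the colon-separated hex digest as bytes).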
#TODO
#Certificate.objects.raw("select (select count(*) from certificate WHERE x509_keySize(certificate) = %s)*100/cast(COUNT(*) as float) as percentage, 0 as id FROM certificate;",
#[cert.get_x509_data().get_pubkey().bits()])
#return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': round(keysize_distribution[0].percentage,2)})
return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': 'TODO', 'digest_sha256':digest_sha256})
def certraw(request,cert_id):
cert = get_object_or_404(Certificate, pk=cert_id)
response = HttpResponse(cert.certificate, content_type='application/octet-stream')
	response['Content-Disposition'] = 'attachment; filename="certificate_{}.crt"'.format(cert_id)
return response
def logdetail(request,log_id):
log = get_object_or_404(CtLog, pk=log_id)
number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()
return render(request, 'observer/logdetail.html', { 'log' : log, 'number_of_issued_ca' : number_of_issued_ca})
def flag(request, flag_id):
try:
with open(os.path.join(BASE_DIR, "static/flags/png/{0}.png".format(flag_id.lower())), "rb") as f:
return HttpResponse(f.read(), content_type="image/png")
except IOError:
with open(os.path.join(BASE_DIR, "static/flags/png/-.png"), "rb") as f:
return HttpResponse(f.read(), content_type="image/png")
def imprint(request):
return render(request, 'observer/imprint.html')
def issues(request):
return render(request, 'observer/issues.html')
def status(request):
status = {'analyzer':{'lastrun':0}, 'monitor':{'lastrun':0}, 'msg':'ok'}
try:
with open('/static/data/status.json', 'r') as f:
status = json.load(f)
status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status['analyzer']['lastrun'])
status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status['monitor']['lastrun'])
except Exception as e:
		status['msg'] = "Could not load status file. " + str(e)
return render(request, 'observer/status.html', {'status':status})
def certcheck(request):
if request.method == 'POST':
serial_post = request.POST['serial']
sqlQuery = """SELECT id FROM certificate WHERE serial=%s"""
	sqlQuery_commonName = """SELECT * FROM ca WHERE """ # currently unused / incomplete
	current_time = str(datetime.datetime.now()) # currently unused
serial_int = int(serial_post, 16)
serial = serial_int.to_bytes((serial_int.bit_length() + 15) // 8, 'big', signed=True) or b'\0'
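		# Converts the hex serial to a signed big-endian byte string (sized with
		# headroom so the sign bit stays clear); psycopg2.Binary adapts it to bytea,
		# presumably matching how the importer stored the SERIAL column.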
sqlData = (psycopg2.Binary(serial),)
		found_serial = list(Certificate.objects.raw(sqlQuery, sqlData)) # materialize so the truthiness check below is meaningful
if(found_serial):
return HttpResponse(found_serial)
else:
return HttpResponse("none")
return render(request, 'observer/checkserial.html', {})
|
flexible
|
{
"blob_id": "bcc959dcdb60c55897158e85d73c59592b112c12",
"index": 6381,
"step-1": "<mask token>\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, page=None, ae=None, issuer_ca=None):\n if page == None:\n return 
HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(Certificate.objects.filter(id__in=\n 
RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\n<mask token>\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n 
status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, page=None, ae=None, issuer_ca=None):\n if page == None:\n return 
HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(Certificate.objects.filter(id__in=\n 
RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef list_cn_certs(request, cn):\n field_id = 'common name'\n expression = cn\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [cn])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 
'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\n<mask token>\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\[email protected]\ndef get_item(dictionary, key):\n return dictionary.get(key)\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, 
page=None, ae=None, issuer_ca=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = 
Paginator(Certificate.objects.filter(id__in=\n RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef list_cn_certs(request, cn):\n field_id = 'common name'\n expression = cn\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [cn])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 
'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef flag(request, flag_id):\n try:\n with open(os.path.join(BASE_DIR, 'static/flags/png/{0}.png'.format(\n flag_id.lower())), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n except IOError:\n with open(os.path.join(BASE_DIR, 'static/flags/png/-.png'), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n\n\n<mask token>\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\[email protected]\ndef get_item(dictionary, key):\n return dictionary.get(key)\n\n\nclass FastCountQuerySet:\n\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT reltuples FROM pg_class WHERE relname = %s',\n [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, item):\n return self.queryset[item]\n\n\nclass MetadataCountQuerySet:\n\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute('SELECT name_value FROM metadata WHERE name_type = %s',\n [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n\n def __getitem__(self, key):\n return self.queryset[key]\n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n messages = []\n if 'subok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'\n })\n if 'unsubok' in request.GET:\n messages.append({'class': 'alert-info', 'text':\n '<strong>Unsubscription request</strong> - We sent you a confirmation link via email. sClick it, and you should be all set.'\n })\n subscribeform = SubscribeUnsubscribeForm()\n with connection.cursor() as c:\n c.execute('SELECT NAME_TYPE, NAME_VALUE FROM metadata')\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n return render(request, 'observer/index.html', {'total_certs': metadata[\n 'number_of_certs'], 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(), 'active_certs': metadata[\n 'number_of_active_certs'], 'expired_certs': metadata[\n 'number_of_expired_certs'], 'revoked_certs': metadata[\n 'number_of_revoked_certs'], 'misissued_certs': metadata[\n 'number_of_misissued_certs'], 'behaving_cas': metadata[\n 'number_of_correctly_behaving_cas'], 'interesting_cas': metadata[\n 'number_of_interesting_cas'], 'biggest_log': metadata[\n 'number_of_certs_in_biggest_log'], 'biggest_log_name': CtLog.\n objects.get(id=metadata['biggest_log_id']).name, 'smallest_log':\n metadata['number_of_certs_in_smallest_log'], 'uptime_days': (\n timezone.now().date() - datetime.date(2015, 10, 14)).days,\n 'messages': messages, 'subscribeform': subscribeform})\n\n\ndef search(request):\n term = request.GET.get('term', '')\n return render(request, 'observer/search.html', {'term': term})\n\n\ndef caall(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n page = int(page)\n list_of_certs = []\n filtered_qs = CaFilter(request.GET, queryset=Ca.objects.all().order_by(\n 'common_name'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/cas.html', {'list_of_ca':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef certall(request, 
page=None, ae=None, issuer_ca=None):\n if page == None:\n return HttpResponsePermanentRedirect('all/1')\n ae = request.GET.get('algorithm')\n issuer_ca = request.GET.get('issuer_ca')\n date_notbefore = request.GET.get('date_notbefore')\n date_notbefore_gte = request.GET.get('date_notbefore_gte')\n is_active = request.GET.get('is_active')\n date_notafter = request.GET.get('date_notafter')\n date_notafter_lte = request.GET.get('date_notafter_lte')\n page = int(page)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=MetadataCountQuerySet(\n Certificate.objects.all().order_by('-id'), 'certificate'))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n query = FastCountQuerySet(Certificate.objects.all().order_by('-id'),\n 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '1' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_before__lte=timezone.now(), not_after__gte=timezone.now\n ()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_before__lte\n =timezone.now(), not_after__gte=timezone.now()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active == '0' or is_active == '' or is_active == None:\n if issuer_ca != None and (is_active == None or is_active == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and (issuer_ca == None or issuer_ca == ''):\n query = FastCountQuerySet(Certificate.objects.filter(\n not_after__lte=datetime.date.today()), 'certificate')\n if issuer_ca == '' and is_active == '':\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n if is_active != None and issuer_ca != None:\n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains=issuer_ca, not_after__lte=\n datetime.date.today()), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\n<mask token>\n\n\ndef certexpired(request, page=None, order=None):\n if page == None:\n return HttpResponsePermanentRedirect('expired/1')\n page = int(page)\n list_of_certs = []\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(\n not_after__lt=timezone.now()), 'number_of_expired_certs'),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certrevoked(request, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('revoked/1')\n page = int(page)\n list_of_certs = []\n paginator = 
Paginator(Certificate.objects.filter(id__in=\n RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE\n )\n if page in paginator.page_range:\n list_of_certs = paginator.page(page)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs})\n\n\ndef certs_by_log(request, log_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('./1')\n page = int(page)\n log_id = int(log_id)\n list_of_certs = []\n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id),\n ITEMS_PER_PAGE)\n if page in paginator.page_range:\n list_of_entries = paginator.page(page)\n return render(request, 'observer/log_certs.html', {'log':\n get_object_or_404(CtLog, pk=log_id), 'list_of_entries':\n list_of_entries})\n\n\ndef certs_by_ca(request, ca_id, page=None):\n if page == None:\n return HttpResponsePermanentRedirect('certificates/1')\n page = int(page)\n ca_id = int(ca_id)\n list_of_certs = []\n filtered_qs = CertFilter(request.GET, queryset=Certificate.objects.\n filter(issuer_ca=ca_id))\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n return render(request, 'observer/certs.html', {'list_of_certs':\n list_of_certs, 'filter': filtered_qs})\n\n\ndef list_cn_certs(request, cn):\n field_id = 'common name'\n expression = cn\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [cn])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef list_dnsname_certs(request, dnsname):\n field_id = 'dnsname'\n expression = dnsname\n list_of_certs = Certificate.objects.raw(\n \"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\"\n , [dnsname])\n issues = issuefinder.get_all_issues(list(list_of_certs))\n return render(request, 'observer/history.html', {'field_id': field_id,\n 'expression': expression, 'list_of_certs': list_of_certs, 'issues':\n issues})\n\n\ndef log(request):\n return render(request, 'observer/logs.html', {'list_of_logs': CtLog.\n objects.all().order_by('-is_active', '-latest_entry_id', 'name')})\n\n\ndef cadetail(request, ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n return render(request, 'observer/cadetail.html', {'ca': ca,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request, cert_id=None, cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex')\n cert = get_object_or_404(Certificate, certificate__sha256=\n cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':', '').lower()[2:-1\n ]\n return render(request, 'observer/certdetail.html', {'certificate': cert,\n 
'ca_certificate': cacert, 'keysize_distribution': 'TODO',\n 'digest_sha256': digest_sha256})\n\n\n<mask token>\n\n\ndef logdetail(request, log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', {'log': log,\n 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef flag(request, flag_id):\n try:\n with open(os.path.join(BASE_DIR, 'static/flags/png/{0}.png'.format(\n flag_id.lower())), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n except IOError:\n with open(os.path.join(BASE_DIR, 'static/flags/png/-.png'), 'rb') as f:\n return HttpResponse(f.read(), content_type='image/png')\n\n\ndef imprint(request):\n return render(request, 'observer/imprint.html')\n\n\ndef issues(request):\n return render(request, 'observer/issues.html')\n\n\ndef status(request):\n status = {'analyzer': {'lastrun': 0}, 'monitor': {'lastrun': 0}, 'msg':\n 'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status\n ['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = 'Could not load status file.' + str(e)\n return render(request, 'observer/status.html', {'status': status})\n\n\n<mask token>\n",
"step-5": "from django.shortcuts import render, get_object_or_404\nfrom django.utils import timezone\nfrom django.db.models import Count\nfrom django.db.models import QuerySet\nfrom django.db import connection\nfrom django.core.paginator import Paginator, PageNotAnInteger\nfrom django.http import HttpResponse\nfrom django.http import HttpResponsePermanentRedirect\nimport datetime\nimport os\nimport json\nfrom ctobservatory.settings import BASE_DIR\nfrom .models import *\nfrom notification.forms import SubscribeUnsubscribeForm\n#from .issuefinder import *\nimport observer.issuefinder as issuefinder\nfrom django.template.defaulttags import register\nimport hashlib\nimport psycopg2\nITEMS_PER_PAGE = 50\n\[email protected]\ndef get_item(dictionary, key):\n return dictionary.get(key)\n\nclass FastCountQuerySet():\n def __init__(self, queryset, tablename):\n self.queryset = queryset\n self.tablename = tablename\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute(\"SELECT reltuples FROM pg_class WHERE relname = %s\", [self.tablename])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n # passthrough all the other methods\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n \n def __getitem__(self, item):\n return self.queryset[item]\n\nclass MetadataCountQuerySet():\n def __init__(self, queryset, propertyname):\n self.queryset = queryset\n self.propertyname = propertyname\n\n def count(self):\n cursor = connection.cursor()\n cursor.execute(\"SELECT name_value FROM metadata WHERE name_type = %s\", [self.propertyname])\n row = cursor.fetchone()\n count = int(row[0])\n cursor.close()\n return count\n\n # passthrough all the other methods\n def __getattr__(self, attr):\n try:\n return object.__getattr__(self, attr)\n except AttributeError:\n return getattr(self.queryset, attr)\n \n def __getitem__(self, key):\n return self.queryset[key]\n \n\n\ndef index(request):\n metadata = {}\n expired_certs = 0\n active_certs = 0\n total_certs = 0\n total_cas = 0\n \n messages = []\n if('subok' in request.GET):\n messages.append({'class':'alert-info','text':'<strong>Subscription request</strong> - We sent you a confirmation link via email. Click it, and you should be all set.'})\n if('unsubok' in request.GET):\n messages.append({'class':'alert-info','text':'<strong>Unsubscription request</strong> - We sent you a confirmation link via email. 
sClick it, and you should be all set.'})\n \n subscribeform = SubscribeUnsubscribeForm()\n \n with connection.cursor() as c:\n c.execute(\"SELECT NAME_TYPE, NAME_VALUE FROM metadata\")\n rows = c.fetchall()\n for row in rows:\n metadata[row[0]] = row[1]\n\n return render(request, 'observer/index.html',\n {\n 'total_certs': metadata['number_of_certs'],\n 'total_ca': metadata['number_of_cas'],\n 'total_logs': CtLog.objects.count(),\n 'active_certs': metadata['number_of_active_certs'],\n 'expired_certs': metadata['number_of_expired_certs'],\n 'revoked_certs': metadata['number_of_revoked_certs'],\n 'misissued_certs': metadata['number_of_misissued_certs'],\n 'behaving_cas' : metadata['number_of_correctly_behaving_cas'],\n 'interesting_cas' : metadata['number_of_interesting_cas'],\n 'biggest_log' : metadata['number_of_certs_in_biggest_log'],\n 'biggest_log_name' : CtLog.objects.get(id=metadata['biggest_log_id']).name,\n 'smallest_log' : metadata['number_of_certs_in_smallest_log'],\n 'uptime_days': (timezone.now().date()-datetime.date(2015,10,14)).days, #TODO\n 'messages' : messages,\n 'subscribeform' : subscribeform\n }\n )\n\ndef search(request):\n term = request.GET.get(\"term\",\"\")\n\n #found_ca = Ca.objects.filter(name__icontains=term)\n #found_cn_dnsname = Certificate.objects.raw(\"SELECT DISTINCT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, x509_notBefore(CERTIFICATE) FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE (NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s))) OR (NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE)) LIKE reverse(lower(%s)))\n #ORDER BY x509_notBefore(CERTIFICATE) DESC\", [term, term])\n\n return render(request, 'observer/search.html',\n {\n 'term' : term\n #'found_ca' : found_ca,\n #'found_cn_dnsname' : found_cn_dnsname\n }\n )\n\ndef caall(request, page=None): #VIEW FOR CAs\n \n if(page==None):\n return HttpResponsePermanentRedirect(\"all/1\")\n \n\n page = int(page)\n\n list_of_certs = []\n \n filtered_qs = CaFilter(\n request.GET, \n queryset=Ca.objects.all().order_by('common_name')\n )\n\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n \n return render(request, 'observer/cas.html',\n {\n 'list_of_ca': list_of_certs, \n 'filter': filtered_qs#Ca.objects.annotate(num_certs=Count('certificate')).order_by('-num_certs'),\n }\n )\n\ndef certall(request, page=None, ae=None, issuer_ca=None): #VIEW FOR Certificates->ALL\n\n if(page==None):\n return HttpResponsePermanentRedirect(\"all/1\")\n\n ae = request.GET.get(\"algorithm\")\n issuer_ca = request.GET.get(\"issuer_ca\")\n date_notbefore = request.GET.get(\"date_notbefore\")\n date_notbefore_gte = request.GET.get(\"date_notbefore_gte\")\n is_active = request.GET.get(\"is_active\")\n date_notafter = request.GET.get(\"date_notafter\")\n date_notafter_lte = request.GET.get(\"date_notafter_lte\")\n \n page = int(page)\n\n list_of_certs = []\n\n \n\n filtered_qs = CertFilter(\n request.GET, \n queryset=MetadataCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')\n )\n \n\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n\n\n \n #Alternative filter solution for better performance\n #https://localhost/cert/all/1?issuer_ca=merge&date_notbefore=&date_notbefore_gte=&is_active=&date_notafter=&date_notafter_lte=\n \n query = 
FastCountQuerySet(Certificate.objects.all().order_by('-id'), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active == \"1\" or is_active == \"\" or is_active == None):\n if(issuer_ca != None and (is_active == None or is_active == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and (issuer_ca == None or issuer_ca == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'certificate')\n \n if(issuer_ca == \"\" and is_active == \"\"):\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and issuer_ca != None ): \n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains = issuer_ca,\n not_before__lte=timezone.now(), not_after__gte=timezone.now(), ), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active == \"0\" or is_active == \"\" or is_active == None):\n if(issuer_ca != None and (is_active == None or is_active == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(issuer_ca__common_name__contains = issuer_ca), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and (issuer_ca == None or issuer_ca == \"\")):\n query = FastCountQuerySet(Certificate.objects.filter(not_after__lte=datetime.date.today()), 'certificate')\n \n if(issuer_ca == \"\" and is_active == \"\"):\n query = FastCountQuerySet(Certificate.objects.all(), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n if(is_active != None and issuer_ca != None ): \n query = FastCountQuerySet(Certificate.objects.filter(\n issuer_ca__common_name__contains = issuer_ca,\n not_after__lte=datetime.date.today() ), 'certificate')\n paginator = Paginator(query, ITEMS_PER_PAGE)\n \n \n ####################################################\n \n\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n \n #if(ae != None):\n #list_of_certs = Certificate.objects.raw(\"SELECT * FROM certificate WHERE SIGNATURE_ALGORITHM=%s\", [ae])\n \n \n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs, \n 'filter': filtered_qs\n }\n )\n\ndef certactive(request, page=None):\n\n if(page==None):\n return HttpResponsePermanentRedirect(\"active/1\")\n\n page = int(page)\n\n list_of_certs = []\n\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_before__lte=timezone.now(), not_after__gte=timezone.now()), 'number_of_active_certs'), ITEMS_PER_PAGE)\n if(page in paginator.page_range):\n list_of_certs = paginator.page(page)\n\n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs\n }\n )\n\ndef certexpired(request, page=None, order=None):\n if(page==None):\n return HttpResponsePermanentRedirect(\"expired/1\")\n\n\n page = int(page)\n\n list_of_certs = []\n\n paginator = Paginator(MetadataCountQuerySet(Certificate.objects.filter(not_after__lt=timezone.now()), 'number_of_expired_certs'), ITEMS_PER_PAGE)\n# paginator = Paginator(Certificate.objects.filter(not_after__lt=timezone.now()), ITEMS_PER_PAGE)\n if(page in paginator.page_range):\n list_of_certs = paginator.page(page)\n\n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs\n }\n )\ndef 
certrevoked(request, page=None):\n if(page==None):\n return HttpResponsePermanentRedirect(\"revoked/1\")\n\n page = int(page)\n\n list_of_certs = []\n\n paginator = Paginator(Certificate.objects.filter(id__in=RevokedCertificate.objects.all().values('certificate')), ITEMS_PER_PAGE)\n if(page in paginator.page_range):\n list_of_certs = paginator.page(page)\n\n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs\n }\n )\n\ndef certs_by_log(request, log_id, page=None):\n if(page==None):\n return HttpResponsePermanentRedirect(\"./1\")\n\n page = int(page)\n log_id = int(log_id)\n \n list_of_certs = []\n \n paginator = Paginator(CtLogEntry.objects.filter(ct_log=log_id), ITEMS_PER_PAGE)\n if(page in paginator.page_range):\n list_of_entries = paginator.page(page)\n \n\n return render(request, 'observer/log_certs.html',\n {\n 'log': get_object_or_404(CtLog, pk=log_id),\n 'list_of_entries' : list_of_entries\n }\n )\n\ndef certs_by_ca(request, ca_id, page=None):\n\n if(page==None):\n return HttpResponsePermanentRedirect(\"certificates/1\")\n\n page = int(page)\n ca_id = int(ca_id)\n\n list_of_certs = []\n \n \n \n filtered_qs = CertFilter(\n request.GET, \n queryset=Certificate.objects.filter(issuer_ca=ca_id)\n )\n\n paginator = Paginator(filtered_qs.qs, ITEMS_PER_PAGE)\n page = request.GET.get('page')\n\n try:\n list_of_certs = paginator.page(page)\n except PageNotAnInteger:\n list_of_certs = paginator.page(1)\n \n \n return render(request, 'observer/certs.html',\n {\n 'list_of_certs': list_of_certs, \n 'filter': filtered_qs\n })\n \n \n\n# paginator = Paginator(Certificate.objects.filter(issuer_ca=ca_id), ITEMS_PER_PAGE)\n# if(page in paginator.page_range):\n# list_of_certs = paginator.page(page)\n\n# return render(request, 'observer/certs.html',\n# {\n# 'list_of_certs': list_of_certs\n# }\n# )\n\ndef list_cn_certs(request, cn):\n\n field_id = 'common name'\n expression = cn\n\n list_of_certs = Certificate.objects.raw(\"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='commonName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\", [cn])\n #list_of_certs = Certificate.objects.filter(certificate__common_name=cn).order_by('not_before')\n \n \n issues = issuefinder.get_all_issues(list(list_of_certs))\n #issues = issuefinder.get_first_certificates(list_of_certs)\n\n return render(request, 'observer/history.html',\n {\n 'field_id': field_id,\n 'expression': expression,\n 'list_of_certs': list_of_certs,\n 'issues':issues\n }\n )\n\ndef list_dnsname_certs(request, dnsname):\n\n field_id = 'dnsname'\n expression = dnsname\n\n list_of_certs = Certificate.objects.raw(\"SELECT c.ID, c.CERTIFICATE, c.ISSUER_CA_ID, c.SERIAL, c.SHA256, c.NOT_BEFORE, c.NOT_AFTER FROM certificate_identity AS ci JOIN certificate AS c ON ci.CERTIFICATE_ID=c.ID WHERE NAME_TYPE='dNSName' AND reverse(lower(NAME_VALUE))=reverse(lower(%s)) ORDER BY c.NOT_BEFORE ASC\", [dnsname])\n \n issues = issuefinder.get_all_issues(list(list_of_certs))\n \n return render(request, 'observer/history.html',\n {\n 'field_id': field_id,\n 'expression': expression,\n 'list_of_certs': list_of_certs,\n 'issues':issues\n }\n )\n\ndef log(request): #LOG VIEW\n return render(request, 'observer/logs.html',\n {\n #'list_of_logs': CtLog.objects.all().annotate(entries=Count('ctlogentry')).order_by('latest_entry_id')\n 'list_of_logs': 
CtLog.objects.all().order_by('-is_active','-latest_entry_id','name')\n }\n )\n\ndef cadetail(request,ca_id):\n ca = get_object_or_404(Ca, pk=ca_id)\n \n #counting number of issued CA's:\n number_of_issued_ca = Certificate.objects.filter(issuer_ca=ca_id).count()\n \n return render(request, 'observer/cadetail.html', { 'ca' : ca, 'number_of_issued_ca': number_of_issued_ca})\n\n\ndef certdetail(request,cert_id=None,cert_sha256=None):\n if cert_sha256:\n cert_sha256_bin = cert_sha256.decode('hex') #Does not work on python3\n cert = get_object_or_404(Certificate, certificate__sha256=cert_sha256_bin)\n if cert_id:\n cert = get_object_or_404(Certificate, pk=cert_id)\n cacert = CaCertificate.objects.filter(certificate_id=cert_id).first()\n digest_sha256 = str(cert.get_digest_sha256()).replace(':','').lower()[2:-1]\n\n #TODO\n #Certificate.objects.raw(\"select (select count(*) from certificate WHERE x509_keySize(certificate) = %s)*100/cast(COUNT(*) as float) as percentage, 0 as id FROM certificate;\",\n #[cert.get_x509_data().get_pubkey().bits()])\n\n #return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': round(keysize_distribution[0].percentage,2)})\n return render(request, 'observer/certdetail.html', { 'certificate' : cert, 'ca_certificate' : cacert, 'keysize_distribution': 'TODO', 'digest_sha256':digest_sha256})\n\ndef certraw(request,cert_id):\n cert = get_object_or_404(Certificate, pk=cert_id)\n \n response = HttpResponse(cert.certificate, content_type='application/octet-stream')\n response['Content-Disposition'] = 'attachment; filename=\"certificate_{}.crt'.format(cert_id)\n return response\n\ndef logdetail(request,log_id):\n log = get_object_or_404(CtLog, pk=log_id)\n \n number_of_issued_ca = CtLogEntry.objects.filter(ct_log=log_id).count()\n return render(request, 'observer/logdetail.html', { 'log' : log, 'number_of_issued_ca' : number_of_issued_ca})\n\ndef flag(request, flag_id):\n try:\n with open(os.path.join(BASE_DIR, \"static/flags/png/{0}.png\".format(flag_id.lower())), \"rb\") as f:\n return HttpResponse(f.read(), content_type=\"image/png\")\n except IOError:\n with open(os.path.join(BASE_DIR, \"static/flags/png/-.png\"), \"rb\") as f:\n return HttpResponse(f.read(), content_type=\"image/png\")\n\ndef imprint(request):\n return render(request, 'observer/imprint.html')\n \ndef issues(request):\n return render(request, 'observer/issues.html')\n \ndef status(request):\n status = {'analyzer':{'lastrun':0}, 'monitor':{'lastrun':0}, 'msg':'ok'}\n try:\n with open('/static/data/status.json', 'r') as f:\n status = json.load(f)\n \n status['analyzer']['lastrun'] = datetime.datetime.fromtimestamp(status['analyzer']['lastrun'])\n status['monitor']['lastrun'] = datetime.datetime.fromtimestamp(status['monitor']['lastrun'])\n except Exception as e:\n status['msg'] = \"Could not load status file.\"+str(e)\n \n \n return render(request, 'observer/status.html', {'status':status})\n\n\ndef certcheck(request):\n \n if request.method == 'POST':\n \n serial_post = request.POST['serial']\n \n sqlQuery = \"\"\"SELECT id FROM certificate WHERE serial=%s\"\"\"\n sqlQuery_commonName = \"\"\"SELECT * FROM ca WHERE \"\"\"\n \n \n current_time = str(datetime.datetime.now())\n \n serial_int = int(serial_post, 16)\n serial = serial_int.to_bytes((serial_int.bit_length() + 15) // 8, 'big', signed=True) or b'\\0'\n sqlData = (psycopg2.Binary(serial),)\n \n found_serial = Certificate.objects.raw(sqlQuery, sqlData)\n \n if(found_serial):\n return 
HttpResponse(found_serial)\n else:\n return HttpResponse(\"none\")\n \n\n return render(request, 'observer/checkserial.html', {})\n",
"step-ids": [
25,
26,
28,
29,
35
]
}
|
[
25,
26,
28,
29,
35
] |
version https://git-lfs.github.com/spec/v1
oid sha256:a2959c4cccf29b3797cc2e2dcef87ddb5a0779d9fb992bb38e190b791ae37eb0
size 88352
|
normal
|
{
"blob_id": "932bb7c9dbf3e97c966d2d7d537e747756831e30",
"index": 608,
"step-1": "version https://git-lfs.github.com/spec/v1\noid sha256:a2959c4cccf29b3797cc2e2dcef87ddb5a0779d9fb992bb38e190b791ae37eb0\nsize 88352\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import cachetools
cache = cachetools.LRUCache(maxsize = 3)
cache['PyCon'] = 'India'
cache['year'] = '2017'
print("Older: " + cache['year'])
cache['year'] = '2018'
print("Newer: " + cache['year'])
print(cache)
cache['sdate'] = '05/09/2018'
print(cache)
cache['edate'] = '09/09/2018'
print(cache)
|
normal
|
{
"blob_id": "aebc918d6a1d1d2473f74d77b8a915ac25548e3a",
"index": 443,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Older: ' + cache['year'])\n<mask token>\nprint('Newer: ' + cache['year'])\nprint(cache)\n<mask token>\nprint(cache)\n<mask token>\nprint(cache)\n",
"step-3": "<mask token>\ncache = cachetools.LRUCache(maxsize=3)\ncache['PyCon'] = 'India'\ncache['year'] = '2017'\nprint('Older: ' + cache['year'])\ncache['year'] = '2018'\nprint('Newer: ' + cache['year'])\nprint(cache)\ncache['sdate'] = '05/09/2018'\nprint(cache)\ncache['edate'] = '09/09/2018'\nprint(cache)\n",
"step-4": "import cachetools\ncache = cachetools.LRUCache(maxsize=3)\ncache['PyCon'] = 'India'\ncache['year'] = '2017'\nprint('Older: ' + cache['year'])\ncache['year'] = '2018'\nprint('Newer: ' + cache['year'])\nprint(cache)\ncache['sdate'] = '05/09/2018'\nprint(cache)\ncache['edate'] = '09/09/2018'\nprint(cache)\n",
"step-5": "import cachetools\n\ncache = cachetools.LRUCache(maxsize = 3)\ncache['PyCon'] = 'India'\ncache['year'] = '2017'\nprint(\"Older: \" + cache['year'])\n\ncache['year'] = '2018'\nprint(\"Newer: \" + cache['year'])\nprint(cache)\n\ncache['sdate'] = '05/09/2018'\nprint(cache)\n\ncache['edate'] = '09/09/2018'\nprint(cache)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class OvsApi(object):
<|reserved_special_token_0|>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
<|reserved_special_token_0|>
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
<|reserved_special_token_0|>
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
<|reserved_special_token_0|>
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<|reserved_special_token_0|>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OvsApi(object):
<|reserved_special_token_0|>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.
vsctl_timeout)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(check_error
=check_error, log_errors=log_errors)
<|reserved_special_token_0|>
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
<|reserved_special_token_0|>
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
<|reserved_special_token_0|>
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = '%s-patch' % bridge2
if bridge2_link_name is None:
bridge2_link_name = '%s-patch' % bridge1
LOG.debug(
            'generated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'
, {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':
bridge2, 'link2': bridge2_link_name})
return bridge1_link_name, bridge2_link_name
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<|reserved_special_token_0|>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OvsApi(object):
<|reserved_special_token_0|>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.
vsctl_timeout)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(check_error
=check_error, log_errors=log_errors)
<|reserved_special_token_0|>
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
def check_controller(self, target):
controllers = self.ovsdb.get_controller(self.integration_bridge
).execute()
return target in controllers
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
def get_port_ofport_by_id(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'name', 'ofport'})
if iface and self._check_ofport(iface['name'], iface['ofport']):
return iface['ofport']
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = '%s-patch' % bridge2
if bridge2_link_name is None:
bridge2_link_name = '%s-patch' % bridge1
LOG.debug(
            'generated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'
, {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':
bridge2, 'link2': bridge2_link_name})
return bridge1_link_name, bridge2_link_name
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<|reserved_special_token_0|>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class OvsApi(object):
<|reserved_special_token_0|>
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.
vsctl_timeout)
nb_api.db_change_callback(None, None, constants.
CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(check_error
=check_error, log_errors=log_errors)
<|reserved_special_token_0|>
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
def check_controller(self, target):
controllers = self.ovsdb.get_controller(self.integration_bridge
).execute()
return target in controllers
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge', self.
integration_bridge, 'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find('Interface', ('options', '=', {
'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=
iface['name'], tunnel_type=iface['type']))
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(
)
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=columns).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface
def get_port_ofport_by_id(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'name', 'ofport'})
if iface and self._check_ofport(iface['name'], iface['ofport']):
return iface['ofport']
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(port_id, {
'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', 'name']).execute()
for iface in ifaces:
if self.integration_bridge != self._get_bridge_for_iface(iface[
'name']):
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = '%s-patch' % bridge2
if bridge2_link_name is None:
bridge2_link_name = '%s-patch' % bridge1
LOG.debug(
            'generated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'
, {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':
bridge2, 'link2': bridge2_link_name})
return bridge1_link_name, bridge2_link_name
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=
None, peer_link_name=None):
links = self._gen_link_mapping(local_bridge, peer_bridge,
local_link_name, peer_link_name)
self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])
self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])
return links
def _create_patch_port(self, bridge, port, peer, peer_port):
if cfg.CONF.df.enable_dpdk:
self.ovsdb.add_br(bridge, datapath_type='netdev').execute()
else:
self.ovsdb.add_br(bridge, datapath_type='system').execute()
if not self.patch_port_exist(port):
self.ovsdb.add_patch_port(bridge, port, peer, peer_port).execute()
<|reserved_special_token_0|>
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport', check_error=
False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {
'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(id=external_ids.get('qos-id'), topic=
external_ids.get('qos-topic'), version=external_ids.get(
'version'))
<|reserved_special_token_0|>
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', max_kbps), (
'ingress_policing_burst', max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name, (
'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
<|reserved_special_token_0|>
def get_vtp_ofport(self, tunnel_type):
return self.get_port_ofport(tunnel_type + '-vtp')
<|reserved_special_token_1|>
# Copyright (c) 2015 OpenStack Foundation.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_config import cfg
from oslo_log import log
from ovs import vlog
from dragonflow.controller.common import constants
from dragonflow.db.models import ovs
from dragonflow.db.models import qos
from dragonflow.ovsdb import impl_idl
LOG = log.getLogger(__name__)
OFPORT_RANGE_MIN = 1
OFPORT_RANGE_MAX = 65533
OVS_LOG_FILE_NAME = 'df-ovs.log'
class OvsApi(object):
"""The interface of openvswitch
Consumers use this class to set openvswitch or get results from
openvswitch.
"""
def __init__(self, ip, protocol='tcp', port='6640', timeout=10):
super(OvsApi, self).__init__()
self.ip = ip
self.protocol = protocol
self.port = port
        # NOTE: This attribute has to be named vsctl_timeout, as neutron will
        # use it to set the timeout of the ovs db.
self.vsctl_timeout = timeout
self.ovsdb = None
self.integration_bridge = cfg.CONF.df.integration_bridge
if cfg.CONF.log_dir:
vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)
else:
vlog.Vlog.init()
def initialize(self, nb_api):
db_connection = ('%s:%s:%s' % (self.protocol, self.ip, self.port))
nb_api.db_change_callback(None, None,
constants.CONTROLLER_OVS_SYNC_STARTED, None)
self.ovsdb = impl_idl.DFOvsdbApi(
nb_api, db_connection, self.vsctl_timeout)
nb_api.db_change_callback(None, None,
constants.CONTROLLER_OVS_SYNC_FINISHED, None)
def _db_get_val(self, table, record, column, check_error=False,
log_errors=True):
return self.ovsdb.db_get(table, record, column).execute(
check_error=check_error, log_errors=log_errors)
def _get_bridge_for_iface(self, iface_name):
return self.ovsdb.iface_to_br(iface_name).execute()
def set_controller(self, bridge, targets):
self.ovsdb.set_controller(bridge, targets).execute()
def set_controller_fail_mode(self, bridge, fail_mode):
self.ovsdb.set_fail_mode(bridge, fail_mode).execute()
def check_controller(self, target):
controllers = self.ovsdb.get_controller(
self.integration_bridge).execute()
return target in controllers
def check_controller_fail_mode(self, fail_mode):
return fail_mode == self._db_get_val('Bridge',
self.integration_bridge,
'fail_mode')
def get_virtual_tunnel_ports(self):
ifaces = self.ovsdb.db_find(
'Interface', ('options', '=', {'remote_ip': 'flow'}),
columns=['uuid', 'name', 'type']).execute()
tunnel_ports = []
for iface in ifaces:
if (self.integration_bridge !=
self._get_bridge_for_iface(iface['name'])):
continue
tunnel_ports.append(
ovs.OvsPort(
id=str(iface['uuid']),
name=iface['name'],
tunnel_type=iface['type'],
),
)
return tunnel_ports
def add_virtual_tunnel_port(self, tunnel_type):
self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()
def delete_port(self, switch_port):
self.ovsdb.del_port(switch_port.name,
self.integration_bridge).execute()
@staticmethod
def _check_ofport(port_name, ofport):
if ofport is None:
LOG.warning("Can't find ofport for port %s.", port_name)
return False
if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:
LOG.warning("ofport %(ofport)s for port %(port)s is invalid.",
{'ofport': ofport, 'port': port_name})
return False
return True
def get_interface_by_id_with_specified_columns(self, port_id,
specified_columns):
columns = {'external_ids', 'name'}
columns.update(specified_columns)
ifaces = self.ovsdb.db_find(
'Interface', ('external_ids', '=', {'iface-id': port_id}),
columns=columns).execute()
for iface in ifaces:
if (self.integration_bridge !=
self._get_bridge_for_iface(iface['name'])):
# iface-id is the port id in neutron, the same neutron port
# might create multiple interfaces in different bridges
continue
return iface
def get_port_ofport_by_id(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(
port_id, {'name', 'ofport'})
if iface and self._check_ofport(iface['name'], iface['ofport']):
return iface['ofport']
def get_local_port_mac_in_use(self, port_id):
iface = self.get_interface_by_id_with_specified_columns(
port_id, {'mac_in_use'})
if iface and netaddr.valid_mac(iface['mac_in_use']):
return iface['mac_in_use']
def _get_port_name_by_id(self, port_id):
ifaces = self.ovsdb.db_find(
'Interface', ('external_ids', '=', {'iface-id': port_id}),
columns=['external_ids', 'name']).execute()
for iface in ifaces:
if (self.integration_bridge !=
self._get_bridge_for_iface(iface['name'])):
# iface-id is the port id in neutron, the same neutron port
# might create multiple interfaces in different bridges
continue
return iface['name']
def _gen_link_mapping(self, bridge1, bridge2,
bridge1_link_name=None,
bridge2_link_name=None):
if bridge1_link_name is None:
bridge1_link_name = "%s-patch" % bridge2
if bridge2_link_name is None:
bridge2_link_name = "%s-patch" % bridge1
        LOG.debug('generated mappings {%(bridge1)s: %(link1)s,'
' %(bridge2)s: %(link2)s}',
{'bridge1': bridge1,
'link1': bridge1_link_name,
'bridge2': bridge2,
'link2': bridge2_link_name})
return (bridge1_link_name, bridge2_link_name)
def map_patch_to_network(self, network, patch_name):
self.bridge_mapping[network] = patch_name
def get_phy_network_ofport(self, network):
patch_name = self.bridge_mapping.get(network)
if patch_name:
return self.get_port_ofport(patch_name)
def create_patch_pair(self, local_bridge, peer_bridge,
local_link_name=None, peer_link_name=None):
links = self._gen_link_mapping(
local_bridge,
peer_bridge,
local_link_name,
peer_link_name)
self._create_patch_port(
local_bridge,
links[0],
peer_bridge,
links[1])
self._create_patch_port(
peer_bridge,
links[1],
local_bridge,
links[0])
return links
def _create_patch_port(self, bridge, port, peer, peer_port):
if cfg.CONF.df.enable_dpdk:
self.ovsdb.add_br(bridge, datapath_type='netdev').execute()
else:
self.ovsdb.add_br(bridge, datapath_type='system').execute()
if not self.patch_port_exist(port):
self.ovsdb.add_patch_port(bridge, port, peer, peer_port).execute()
def patch_port_exist(self, port):
return 'patch' == self._db_get_val('Interface', port, 'type',
check_error=False,
log_errors=False)
def get_port_ofport(self, port):
return self._db_get_val('Interface', port, 'ofport',
check_error=False, log_errors=False)
def get_port_mac_in_use(self, port):
return self._db_get_val('Interface', port, 'mac_in_use',
check_error=False, log_errors=False)
def get_port_qos(self, port_id):
port_qoses = self.ovsdb.db_find(
'QoS', ('external_ids', '=', {'iface-id': port_id}),
columns=['external_ids', '_uuid']).execute()
if port_qoses:
ovsdb_qos = port_qoses[0]
external_ids = ovsdb_qos['external_ids']
return qos.QosPolicy(
id=external_ids.get('qos-id'),
topic=external_ids.get('qos-topic'),
version=external_ids.get('version'),
)
def set_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
qos_uuid = txn.add(self.ovsdb.create_qos(port_id, qos))
txn.add(self.ovsdb.db_set('Interface', port_name,
('ingress_policing_rate', max_kbps),
('ingress_policing_burst',
max_burst_kbps)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', qos_uuid)))
def update_port_qos(self, port_id, qos):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
max_kbps = qos.get_max_kbps()
max_burst_kbps = qos.get_max_burst_kbps()
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name,
('ingress_policing_rate', max_kbps),
('ingress_policing_burst',
max_burst_kbps)))
txn.add(self.ovsdb.update_qos(port_id, qos))
def clear_port_qos(self, port_id):
port_name = self._get_port_name_by_id(port_id)
if not port_name:
return
with self.ovsdb.transaction(check_error=True) as txn:
txn.add(self.ovsdb.db_set('Interface', port_name,
('ingress_policing_rate', 0),
('ingress_policing_burst', 0)))
txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))
txn.add(self.ovsdb.delete_qos(port_id))
def delete_port_qos_and_queue(self, port_id):
self.ovsdb.delete_qos(port_id).execute()
def get_vtp_ofport(self, tunnel_type):
return self.get_port_ofport(tunnel_type + '-vtp')
|
flexible
|
{
"blob_id": "89a3c34b3145b93a4cfa78eeb055c8136ab2bfe6",
"index": 2084,
"step-1": "<mask token>\n\n\nclass OvsApi(object):\n <mask token>\n\n def __init__(self, ip, protocol='tcp', port='6640', timeout=10):\n super(OvsApi, self).__init__()\n self.ip = ip\n self.protocol = protocol\n self.port = port\n self.vsctl_timeout = timeout\n self.ovsdb = None\n self.integration_bridge = cfg.CONF.df.integration_bridge\n if cfg.CONF.log_dir:\n vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)\n else:\n vlog.Vlog.init()\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def set_controller_fail_mode(self, bridge, fail_mode):\n self.ovsdb.set_fail_mode(bridge, fail_mode).execute()\n <mask token>\n\n def check_controller_fail_mode(self, fail_mode):\n return fail_mode == self._db_get_val('Bridge', self.\n integration_bridge, 'fail_mode')\n\n def get_virtual_tunnel_ports(self):\n ifaces = self.ovsdb.db_find('Interface', ('options', '=', {\n 'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()\n tunnel_ports = []\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=\n iface['name'], tunnel_type=iface['type']))\n return tunnel_ports\n\n def add_virtual_tunnel_port(self, tunnel_type):\n self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()\n\n def delete_port(self, switch_port):\n self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(\n )\n\n @staticmethod\n def _check_ofport(port_name, ofport):\n if ofport is None:\n LOG.warning(\"Can't find ofport for port %s.\", port_name)\n return False\n if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:\n LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',\n {'ofport': ofport, 'port': port_name})\n return False\n return True\n\n def get_interface_by_id_with_specified_columns(self, port_id,\n specified_columns):\n columns = {'external_ids', 'name'}\n columns.update(specified_columns)\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), columns=columns).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface\n <mask token>\n\n def get_local_port_mac_in_use(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(port_id, {\n 'mac_in_use'})\n if iface and netaddr.valid_mac(iface['mac_in_use']):\n return iface['mac_in_use']\n\n def _get_port_name_by_id(self, port_id):\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), columns=['external_ids', 'name']).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface['name']\n <mask token>\n\n def map_patch_to_network(self, network, patch_name):\n self.bridge_mapping[network] = patch_name\n\n def get_phy_network_ofport(self, network):\n patch_name = self.bridge_mapping.get(network)\n if patch_name:\n return self.get_port_ofport(patch_name)\n\n def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=\n None, peer_link_name=None):\n links = self._gen_link_mapping(local_bridge, peer_bridge,\n local_link_name, peer_link_name)\n self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])\n self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])\n return links\n <mask token>\n <mask token>\n\n def get_port_ofport(self, port):\n return self._db_get_val('Interface', port, 'ofport', check_error=\n False, log_errors=False)\n\n 
def get_port_mac_in_use(self, port):\n return self._db_get_val('Interface', port, 'mac_in_use',\n check_error=False, log_errors=False)\n\n def get_port_qos(self, port_id):\n port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {\n 'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()\n if port_qoses:\n ovsdb_qos = port_qoses[0]\n external_ids = ovsdb_qos['external_ids']\n return qos.QosPolicy(id=external_ids.get('qos-id'), topic=\n external_ids.get('qos-topic'), version=external_ids.get(\n 'version'))\n <mask token>\n\n def update_port_qos(self, port_id, qos):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n max_kbps = qos.get_max_kbps()\n max_burst_kbps = qos.get_max_burst_kbps()\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name, (\n 'ingress_policing_rate', max_kbps), (\n 'ingress_policing_burst', max_burst_kbps)))\n txn.add(self.ovsdb.update_qos(port_id, qos))\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass OvsApi(object):\n <mask token>\n\n def __init__(self, ip, protocol='tcp', port='6640', timeout=10):\n super(OvsApi, self).__init__()\n self.ip = ip\n self.protocol = protocol\n self.port = port\n self.vsctl_timeout = timeout\n self.ovsdb = None\n self.integration_bridge = cfg.CONF.df.integration_bridge\n if cfg.CONF.log_dir:\n vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)\n else:\n vlog.Vlog.init()\n\n def initialize(self, nb_api):\n db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)\n nb_api.db_change_callback(None, None, constants.\n CONTROLLER_OVS_SYNC_STARTED, None)\n self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.\n vsctl_timeout)\n nb_api.db_change_callback(None, None, constants.\n CONTROLLER_OVS_SYNC_FINISHED, None)\n\n def _db_get_val(self, table, record, column, check_error=False,\n log_errors=True):\n return self.ovsdb.db_get(table, record, column).execute(check_error\n =check_error, log_errors=log_errors)\n <mask token>\n\n def set_controller(self, bridge, targets):\n self.ovsdb.set_controller(bridge, targets).execute()\n\n def set_controller_fail_mode(self, bridge, fail_mode):\n self.ovsdb.set_fail_mode(bridge, fail_mode).execute()\n <mask token>\n\n def check_controller_fail_mode(self, fail_mode):\n return fail_mode == self._db_get_val('Bridge', self.\n integration_bridge, 'fail_mode')\n\n def get_virtual_tunnel_ports(self):\n ifaces = self.ovsdb.db_find('Interface', ('options', '=', {\n 'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()\n tunnel_ports = []\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=\n iface['name'], tunnel_type=iface['type']))\n return tunnel_ports\n\n def add_virtual_tunnel_port(self, tunnel_type):\n self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()\n\n def delete_port(self, switch_port):\n self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(\n )\n\n @staticmethod\n def _check_ofport(port_name, ofport):\n if ofport is None:\n LOG.warning(\"Can't find ofport for port %s.\", port_name)\n return False\n if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:\n LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',\n {'ofport': ofport, 'port': port_name})\n return False\n return True\n\n def get_interface_by_id_with_specified_columns(self, port_id,\n specified_columns):\n columns = {'external_ids', 'name'}\n columns.update(specified_columns)\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), columns=columns).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface\n <mask token>\n\n def get_local_port_mac_in_use(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(port_id, {\n 'mac_in_use'})\n if iface and netaddr.valid_mac(iface['mac_in_use']):\n return iface['mac_in_use']\n\n def _get_port_name_by_id(self, port_id):\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), columns=['external_ids', 'name']).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface['name']\n\n def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,\n bridge2_link_name=None):\n if bridge1_link_name is None:\n bridge1_link_name = '%s-patch' % bridge2\n if 
bridge2_link_name is None:\n bridge2_link_name = '%s-patch' % bridge1\n LOG.debug(\n 'genrated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'\n , {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':\n bridge2, 'link2': bridge2_link_name})\n return bridge1_link_name, bridge2_link_name\n\n def map_patch_to_network(self, network, patch_name):\n self.bridge_mapping[network] = patch_name\n\n def get_phy_network_ofport(self, network):\n patch_name = self.bridge_mapping.get(network)\n if patch_name:\n return self.get_port_ofport(patch_name)\n\n def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=\n None, peer_link_name=None):\n links = self._gen_link_mapping(local_bridge, peer_bridge,\n local_link_name, peer_link_name)\n self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])\n self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])\n return links\n <mask token>\n <mask token>\n\n def get_port_ofport(self, port):\n return self._db_get_val('Interface', port, 'ofport', check_error=\n False, log_errors=False)\n\n def get_port_mac_in_use(self, port):\n return self._db_get_val('Interface', port, 'mac_in_use',\n check_error=False, log_errors=False)\n\n def get_port_qos(self, port_id):\n port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {\n 'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()\n if port_qoses:\n ovsdb_qos = port_qoses[0]\n external_ids = ovsdb_qos['external_ids']\n return qos.QosPolicy(id=external_ids.get('qos-id'), topic=\n external_ids.get('qos-topic'), version=external_ids.get(\n 'version'))\n <mask token>\n\n def update_port_qos(self, port_id, qos):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n max_kbps = qos.get_max_kbps()\n max_burst_kbps = qos.get_max_burst_kbps()\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name, (\n 'ingress_policing_rate', max_kbps), (\n 'ingress_policing_burst', max_burst_kbps)))\n txn.add(self.ovsdb.update_qos(port_id, qos))\n\n def clear_port_qos(self, port_id):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name, (\n 'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))\n txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))\n txn.add(self.ovsdb.delete_qos(port_id))\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass OvsApi(object):\n <mask token>\n\n def __init__(self, ip, protocol='tcp', port='6640', timeout=10):\n super(OvsApi, self).__init__()\n self.ip = ip\n self.protocol = protocol\n self.port = port\n self.vsctl_timeout = timeout\n self.ovsdb = None\n self.integration_bridge = cfg.CONF.df.integration_bridge\n if cfg.CONF.log_dir:\n vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)\n else:\n vlog.Vlog.init()\n\n def initialize(self, nb_api):\n db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)\n nb_api.db_change_callback(None, None, constants.\n CONTROLLER_OVS_SYNC_STARTED, None)\n self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.\n vsctl_timeout)\n nb_api.db_change_callback(None, None, constants.\n CONTROLLER_OVS_SYNC_FINISHED, None)\n\n def _db_get_val(self, table, record, column, check_error=False,\n log_errors=True):\n return self.ovsdb.db_get(table, record, column).execute(check_error\n =check_error, log_errors=log_errors)\n <mask token>\n\n def set_controller(self, bridge, targets):\n self.ovsdb.set_controller(bridge, targets).execute()\n\n def set_controller_fail_mode(self, bridge, fail_mode):\n self.ovsdb.set_fail_mode(bridge, fail_mode).execute()\n\n def check_controller(self, target):\n controllers = self.ovsdb.get_controller(self.integration_bridge\n ).execute()\n return target in controllers\n\n def check_controller_fail_mode(self, fail_mode):\n return fail_mode == self._db_get_val('Bridge', self.\n integration_bridge, 'fail_mode')\n\n def get_virtual_tunnel_ports(self):\n ifaces = self.ovsdb.db_find('Interface', ('options', '=', {\n 'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()\n tunnel_ports = []\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=\n iface['name'], tunnel_type=iface['type']))\n return tunnel_ports\n\n def add_virtual_tunnel_port(self, tunnel_type):\n self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()\n\n def delete_port(self, switch_port):\n self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(\n )\n\n @staticmethod\n def _check_ofport(port_name, ofport):\n if ofport is None:\n LOG.warning(\"Can't find ofport for port %s.\", port_name)\n return False\n if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:\n LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',\n {'ofport': ofport, 'port': port_name})\n return False\n return True\n\n def get_interface_by_id_with_specified_columns(self, port_id,\n specified_columns):\n columns = {'external_ids', 'name'}\n columns.update(specified_columns)\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), columns=columns).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface\n\n def get_port_ofport_by_id(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(port_id, {\n 'name', 'ofport'})\n if iface and self._check_ofport(iface['name'], iface['ofport']):\n return iface['ofport']\n\n def get_local_port_mac_in_use(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(port_id, {\n 'mac_in_use'})\n if iface and netaddr.valid_mac(iface['mac_in_use']):\n return iface['mac_in_use']\n\n def _get_port_name_by_id(self, port_id):\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), 
columns=['external_ids', 'name']).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface['name']\n\n def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,\n bridge2_link_name=None):\n if bridge1_link_name is None:\n bridge1_link_name = '%s-patch' % bridge2\n if bridge2_link_name is None:\n bridge2_link_name = '%s-patch' % bridge1\n LOG.debug(\n 'genrated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'\n , {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':\n bridge2, 'link2': bridge2_link_name})\n return bridge1_link_name, bridge2_link_name\n\n def map_patch_to_network(self, network, patch_name):\n self.bridge_mapping[network] = patch_name\n\n def get_phy_network_ofport(self, network):\n patch_name = self.bridge_mapping.get(network)\n if patch_name:\n return self.get_port_ofport(patch_name)\n\n def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=\n None, peer_link_name=None):\n links = self._gen_link_mapping(local_bridge, peer_bridge,\n local_link_name, peer_link_name)\n self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])\n self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])\n return links\n <mask token>\n <mask token>\n\n def get_port_ofport(self, port):\n return self._db_get_val('Interface', port, 'ofport', check_error=\n False, log_errors=False)\n\n def get_port_mac_in_use(self, port):\n return self._db_get_val('Interface', port, 'mac_in_use',\n check_error=False, log_errors=False)\n\n def get_port_qos(self, port_id):\n port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {\n 'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()\n if port_qoses:\n ovsdb_qos = port_qoses[0]\n external_ids = ovsdb_qos['external_ids']\n return qos.QosPolicy(id=external_ids.get('qos-id'), topic=\n external_ids.get('qos-topic'), version=external_ids.get(\n 'version'))\n <mask token>\n\n def update_port_qos(self, port_id, qos):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n max_kbps = qos.get_max_kbps()\n max_burst_kbps = qos.get_max_burst_kbps()\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name, (\n 'ingress_policing_rate', max_kbps), (\n 'ingress_policing_burst', max_burst_kbps)))\n txn.add(self.ovsdb.update_qos(port_id, qos))\n\n def clear_port_qos(self, port_id):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name, (\n 'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))\n txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))\n txn.add(self.ovsdb.delete_qos(port_id))\n <mask token>\n <mask token>\n",
"step-4": "<mask token>\n\n\nclass OvsApi(object):\n <mask token>\n\n def __init__(self, ip, protocol='tcp', port='6640', timeout=10):\n super(OvsApi, self).__init__()\n self.ip = ip\n self.protocol = protocol\n self.port = port\n self.vsctl_timeout = timeout\n self.ovsdb = None\n self.integration_bridge = cfg.CONF.df.integration_bridge\n if cfg.CONF.log_dir:\n vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)\n else:\n vlog.Vlog.init()\n\n def initialize(self, nb_api):\n db_connection = '%s:%s:%s' % (self.protocol, self.ip, self.port)\n nb_api.db_change_callback(None, None, constants.\n CONTROLLER_OVS_SYNC_STARTED, None)\n self.ovsdb = impl_idl.DFOvsdbApi(nb_api, db_connection, self.\n vsctl_timeout)\n nb_api.db_change_callback(None, None, constants.\n CONTROLLER_OVS_SYNC_FINISHED, None)\n\n def _db_get_val(self, table, record, column, check_error=False,\n log_errors=True):\n return self.ovsdb.db_get(table, record, column).execute(check_error\n =check_error, log_errors=log_errors)\n <mask token>\n\n def set_controller(self, bridge, targets):\n self.ovsdb.set_controller(bridge, targets).execute()\n\n def set_controller_fail_mode(self, bridge, fail_mode):\n self.ovsdb.set_fail_mode(bridge, fail_mode).execute()\n\n def check_controller(self, target):\n controllers = self.ovsdb.get_controller(self.integration_bridge\n ).execute()\n return target in controllers\n\n def check_controller_fail_mode(self, fail_mode):\n return fail_mode == self._db_get_val('Bridge', self.\n integration_bridge, 'fail_mode')\n\n def get_virtual_tunnel_ports(self):\n ifaces = self.ovsdb.db_find('Interface', ('options', '=', {\n 'remote_ip': 'flow'}), columns=['uuid', 'name', 'type']).execute()\n tunnel_ports = []\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n tunnel_ports.append(ovs.OvsPort(id=str(iface['uuid']), name=\n iface['name'], tunnel_type=iface['type']))\n return tunnel_ports\n\n def add_virtual_tunnel_port(self, tunnel_type):\n self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()\n\n def delete_port(self, switch_port):\n self.ovsdb.del_port(switch_port.name, self.integration_bridge).execute(\n )\n\n @staticmethod\n def _check_ofport(port_name, ofport):\n if ofport is None:\n LOG.warning(\"Can't find ofport for port %s.\", port_name)\n return False\n if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:\n LOG.warning('ofport %(ofport)s for port %(port)s is invalid.',\n {'ofport': ofport, 'port': port_name})\n return False\n return True\n\n def get_interface_by_id_with_specified_columns(self, port_id,\n specified_columns):\n columns = {'external_ids', 'name'}\n columns.update(specified_columns)\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), columns=columns).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface\n\n def get_port_ofport_by_id(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(port_id, {\n 'name', 'ofport'})\n if iface and self._check_ofport(iface['name'], iface['ofport']):\n return iface['ofport']\n\n def get_local_port_mac_in_use(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(port_id, {\n 'mac_in_use'})\n if iface and netaddr.valid_mac(iface['mac_in_use']):\n return iface['mac_in_use']\n\n def _get_port_name_by_id(self, port_id):\n ifaces = self.ovsdb.db_find('Interface', ('external_ids', '=', {\n 'iface-id': port_id}), 
columns=['external_ids', 'name']).execute()\n for iface in ifaces:\n if self.integration_bridge != self._get_bridge_for_iface(iface[\n 'name']):\n continue\n return iface['name']\n\n def _gen_link_mapping(self, bridge1, bridge2, bridge1_link_name=None,\n bridge2_link_name=None):\n if bridge1_link_name is None:\n bridge1_link_name = '%s-patch' % bridge2\n if bridge2_link_name is None:\n bridge2_link_name = '%s-patch' % bridge1\n LOG.debug(\n 'genrated mappings {%(bridge1)s: %(link1)s, %(bridge2)s: %(link2)s}'\n , {'bridge1': bridge1, 'link1': bridge1_link_name, 'bridge2':\n bridge2, 'link2': bridge2_link_name})\n return bridge1_link_name, bridge2_link_name\n\n def map_patch_to_network(self, network, patch_name):\n self.bridge_mapping[network] = patch_name\n\n def get_phy_network_ofport(self, network):\n patch_name = self.bridge_mapping.get(network)\n if patch_name:\n return self.get_port_ofport(patch_name)\n\n def create_patch_pair(self, local_bridge, peer_bridge, local_link_name=\n None, peer_link_name=None):\n links = self._gen_link_mapping(local_bridge, peer_bridge,\n local_link_name, peer_link_name)\n self._create_patch_port(local_bridge, links[0], peer_bridge, links[1])\n self._create_patch_port(peer_bridge, links[1], local_bridge, links[0])\n return links\n\n def _create_patch_port(self, bridge, port, peer, peer_port):\n if cfg.CONF.df.enable_dpdk:\n self.ovsdb.add_br(bridge, datapath_type='netdev').execute()\n else:\n self.ovsdb.add_br(bridge, datapath_type='system').execute()\n if not self.patch_port_exist(port):\n self.ovsdb.add_patch_port(bridge, port, peer, peer_port).execute()\n <mask token>\n\n def get_port_ofport(self, port):\n return self._db_get_val('Interface', port, 'ofport', check_error=\n False, log_errors=False)\n\n def get_port_mac_in_use(self, port):\n return self._db_get_val('Interface', port, 'mac_in_use',\n check_error=False, log_errors=False)\n\n def get_port_qos(self, port_id):\n port_qoses = self.ovsdb.db_find('QoS', ('external_ids', '=', {\n 'iface-id': port_id}), columns=['external_ids', '_uuid']).execute()\n if port_qoses:\n ovsdb_qos = port_qoses[0]\n external_ids = ovsdb_qos['external_ids']\n return qos.QosPolicy(id=external_ids.get('qos-id'), topic=\n external_ids.get('qos-topic'), version=external_ids.get(\n 'version'))\n <mask token>\n\n def update_port_qos(self, port_id, qos):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n max_kbps = qos.get_max_kbps()\n max_burst_kbps = qos.get_max_burst_kbps()\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name, (\n 'ingress_policing_rate', max_kbps), (\n 'ingress_policing_burst', max_burst_kbps)))\n txn.add(self.ovsdb.update_qos(port_id, qos))\n\n def clear_port_qos(self, port_id):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name, (\n 'ingress_policing_rate', 0), ('ingress_policing_burst', 0)))\n txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))\n txn.add(self.ovsdb.delete_qos(port_id))\n <mask token>\n\n def get_vtp_ofport(self, tunnel_type):\n return self.get_port_ofport(tunnel_type + '-vtp')\n",
"step-5": "# Copyright (c) 2015 OpenStack Foundation.\n#\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport netaddr\nfrom oslo_config import cfg\nfrom oslo_log import log\nfrom ovs import vlog\n\nfrom dragonflow.controller.common import constants\nfrom dragonflow.db.models import ovs\nfrom dragonflow.db.models import qos\nfrom dragonflow.ovsdb import impl_idl\n\nLOG = log.getLogger(__name__)\n\nOFPORT_RANGE_MIN = 1\nOFPORT_RANGE_MAX = 65533\n\nOVS_LOG_FILE_NAME = 'df-ovs.log'\n\n\nclass OvsApi(object):\n \"\"\"The interface of openvswitch\n\n Consumers use this class to set openvswitch or get results from\n openvswitch.\n \"\"\"\n\n def __init__(self, ip, protocol='tcp', port='6640', timeout=10):\n super(OvsApi, self).__init__()\n self.ip = ip\n self.protocol = protocol\n self.port = port\n # NOTE: This has to be this name vsctl_timeout, as neutron will use\n # this attribute to set the timeout of ovs db.\n self.vsctl_timeout = timeout\n self.ovsdb = None\n self.integration_bridge = cfg.CONF.df.integration_bridge\n if cfg.CONF.log_dir:\n vlog.Vlog.init(cfg.CONF.log_dir + '/' + OVS_LOG_FILE_NAME)\n else:\n vlog.Vlog.init()\n\n def initialize(self, nb_api):\n db_connection = ('%s:%s:%s' % (self.protocol, self.ip, self.port))\n\n nb_api.db_change_callback(None, None,\n constants.CONTROLLER_OVS_SYNC_STARTED, None)\n\n self.ovsdb = impl_idl.DFOvsdbApi(\n nb_api, db_connection, self.vsctl_timeout)\n\n nb_api.db_change_callback(None, None,\n constants.CONTROLLER_OVS_SYNC_FINISHED, None)\n\n def _db_get_val(self, table, record, column, check_error=False,\n log_errors=True):\n return self.ovsdb.db_get(table, record, column).execute(\n check_error=check_error, log_errors=log_errors)\n\n def _get_bridge_for_iface(self, iface_name):\n return self.ovsdb.iface_to_br(iface_name).execute()\n\n def set_controller(self, bridge, targets):\n self.ovsdb.set_controller(bridge, targets).execute()\n\n def set_controller_fail_mode(self, bridge, fail_mode):\n self.ovsdb.set_fail_mode(bridge, fail_mode).execute()\n\n def check_controller(self, target):\n controllers = self.ovsdb.get_controller(\n self.integration_bridge).execute()\n return target in controllers\n\n def check_controller_fail_mode(self, fail_mode):\n return fail_mode == self._db_get_val('Bridge',\n self.integration_bridge,\n 'fail_mode')\n\n def get_virtual_tunnel_ports(self):\n ifaces = self.ovsdb.db_find(\n 'Interface', ('options', '=', {'remote_ip': 'flow'}),\n columns=['uuid', 'name', 'type']).execute()\n tunnel_ports = []\n for iface in ifaces:\n if (self.integration_bridge !=\n self._get_bridge_for_iface(iface['name'])):\n continue\n\n tunnel_ports.append(\n ovs.OvsPort(\n id=str(iface['uuid']),\n name=iface['name'],\n tunnel_type=iface['type'],\n ),\n )\n\n return tunnel_ports\n\n def add_virtual_tunnel_port(self, tunnel_type):\n self.ovsdb.add_virtual_tunnel_port(tunnel_type).execute()\n\n def delete_port(self, switch_port):\n self.ovsdb.del_port(switch_port.name,\n self.integration_bridge).execute()\n\n 
@staticmethod\n def _check_ofport(port_name, ofport):\n if ofport is None:\n LOG.warning(\"Can't find ofport for port %s.\", port_name)\n return False\n if ofport < OFPORT_RANGE_MIN or ofport > OFPORT_RANGE_MAX:\n LOG.warning(\"ofport %(ofport)s for port %(port)s is invalid.\",\n {'ofport': ofport, 'port': port_name})\n return False\n\n return True\n\n def get_interface_by_id_with_specified_columns(self, port_id,\n specified_columns):\n columns = {'external_ids', 'name'}\n columns.update(specified_columns)\n ifaces = self.ovsdb.db_find(\n 'Interface', ('external_ids', '=', {'iface-id': port_id}),\n columns=columns).execute()\n\n for iface in ifaces:\n if (self.integration_bridge !=\n self._get_bridge_for_iface(iface['name'])):\n # iface-id is the port id in neutron, the same neutron port\n # might create multiple interfaces in different bridges\n continue\n return iface\n\n def get_port_ofport_by_id(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(\n port_id, {'name', 'ofport'})\n if iface and self._check_ofport(iface['name'], iface['ofport']):\n return iface['ofport']\n\n def get_local_port_mac_in_use(self, port_id):\n iface = self.get_interface_by_id_with_specified_columns(\n port_id, {'mac_in_use'})\n if iface and netaddr.valid_mac(iface['mac_in_use']):\n return iface['mac_in_use']\n\n def _get_port_name_by_id(self, port_id):\n ifaces = self.ovsdb.db_find(\n 'Interface', ('external_ids', '=', {'iface-id': port_id}),\n columns=['external_ids', 'name']).execute()\n for iface in ifaces:\n if (self.integration_bridge !=\n self._get_bridge_for_iface(iface['name'])):\n # iface-id is the port id in neutron, the same neutron port\n # might create multiple interfaces in different bridges\n continue\n\n return iface['name']\n\n def _gen_link_mapping(self, bridge1, bridge2,\n bridge1_link_name=None,\n bridge2_link_name=None):\n if bridge1_link_name is None:\n bridge1_link_name = \"%s-patch\" % bridge2\n if bridge2_link_name is None:\n bridge2_link_name = \"%s-patch\" % bridge1\n\n LOG.debug('genrated mappings {%(bridge1)s: %(link1)s,'\n ' %(bridge2)s: %(link2)s}',\n {'bridge1': bridge1,\n 'link1': bridge1_link_name,\n 'bridge2': bridge2,\n 'link2': bridge2_link_name})\n return (bridge1_link_name, bridge2_link_name)\n\n def map_patch_to_network(self, network, patch_name):\n self.bridge_mapping[network] = patch_name\n\n def get_phy_network_ofport(self, network):\n patch_name = self.bridge_mapping.get(network)\n if patch_name:\n return self.get_port_ofport(patch_name)\n\n def create_patch_pair(self, local_bridge, peer_bridge,\n local_link_name=None, peer_link_name=None):\n links = self._gen_link_mapping(\n local_bridge,\n peer_bridge,\n local_link_name,\n peer_link_name)\n self._create_patch_port(\n local_bridge,\n links[0],\n peer_bridge,\n links[1])\n self._create_patch_port(\n peer_bridge,\n links[1],\n local_bridge,\n links[0])\n return links\n\n def _create_patch_port(self, bridge, port, peer, peer_port):\n if cfg.CONF.df.enable_dpdk:\n self.ovsdb.add_br(bridge, datapath_type='netdev').execute()\n else:\n self.ovsdb.add_br(bridge, datapath_type='system').execute()\n if not self.patch_port_exist(port):\n self.ovsdb.add_patch_port(bridge, port, peer, peer_port).execute()\n\n def patch_port_exist(self, port):\n return 'patch' == self._db_get_val('Interface', port, 'type',\n check_error=False,\n log_errors=False)\n\n def get_port_ofport(self, port):\n return self._db_get_val('Interface', port, 'ofport',\n check_error=False, log_errors=False)\n\n def get_port_mac_in_use(self, 
port):\n return self._db_get_val('Interface', port, 'mac_in_use',\n check_error=False, log_errors=False)\n\n def get_port_qos(self, port_id):\n port_qoses = self.ovsdb.db_find(\n 'QoS', ('external_ids', '=', {'iface-id': port_id}),\n columns=['external_ids', '_uuid']).execute()\n if port_qoses:\n ovsdb_qos = port_qoses[0]\n external_ids = ovsdb_qos['external_ids']\n return qos.QosPolicy(\n id=external_ids.get('qos-id'),\n topic=external_ids.get('qos-topic'),\n version=external_ids.get('version'),\n )\n\n def set_port_qos(self, port_id, qos):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n\n max_kbps = qos.get_max_kbps()\n max_burst_kbps = qos.get_max_burst_kbps()\n with self.ovsdb.transaction(check_error=True) as txn:\n qos_uuid = txn.add(self.ovsdb.create_qos(port_id, qos))\n txn.add(self.ovsdb.db_set('Interface', port_name,\n ('ingress_policing_rate', max_kbps),\n ('ingress_policing_burst',\n max_burst_kbps)))\n txn.add(self.ovsdb.db_set('Port', port_name, ('qos', qos_uuid)))\n\n def update_port_qos(self, port_id, qos):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n\n max_kbps = qos.get_max_kbps()\n max_burst_kbps = qos.get_max_burst_kbps()\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name,\n ('ingress_policing_rate', max_kbps),\n ('ingress_policing_burst',\n max_burst_kbps)))\n txn.add(self.ovsdb.update_qos(port_id, qos))\n\n def clear_port_qos(self, port_id):\n port_name = self._get_port_name_by_id(port_id)\n if not port_name:\n return\n\n with self.ovsdb.transaction(check_error=True) as txn:\n txn.add(self.ovsdb.db_set('Interface', port_name,\n ('ingress_policing_rate', 0),\n ('ingress_policing_burst', 0)))\n txn.add(self.ovsdb.db_set('Port', port_name, ('qos', [])))\n txn.add(self.ovsdb.delete_qos(port_id))\n\n def delete_port_qos_and_queue(self, port_id):\n self.ovsdb.delete_qos(port_id).execute()\n\n def get_vtp_ofport(self, tunnel_type):\n return self.get_port_ofport(tunnel_type + '-vtp')\n",
"step-ids": [
18,
23,
25,
27,
35
]
}
|
[
18,
23,
25,
27,
35
] |
<|reserved_special_token_0|>
class Computer:
def __init__(self, data, inputs, memory_size=8192, interactive=True):
self._memory = [0] * memory_size
for i in range(len(data)):
self._memory[i] = data[i]
self._pc = 0
self._inputs = deque(inputs)
self._outputs = []
self._relative_base = 0
self._interactive = interactive
def input(self, value):
self._inputs.append(value)
def _parse_modes(self, instruction):
i = '%.5d' % instruction
return int(i[2]), int(i[1]), int(i[0])
def _fetch(self):
instruction = self._memory[self._pc]
self._pc += 1
if instruction > 100:
return instruction % 100, self._parse_modes(instruction)
else:
return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)
def _pop(self):
v = self._memory[self._pc]
self._pc += 1
return v
def _load(self, a, mode):
if mode == MODE_IMMEDIATE:
return a
elif mode == MODE_POSITION:
return self._memory[a]
elif mode == MODE_RELATIVE:
return self._memory[self._relative_base + a]
else:
raise InvalidModeException()
def _store(self, a, mode, v):
if mode == MODE_IMMEDIATE:
pass
if mode == MODE_POSITION:
self._memory[a] = v
elif mode == MODE_RELATIVE:
self._memory[self._relative_base + a] = v
else:
raise InvalidModeException()
def _add(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,
modes[1]))
def _multiply(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,
modes[1]))
def _input(self, modes, a):
if self._interactive:
self._store(a, modes[0], int(input('=> ')))
else:
self._store(a, modes[0], self._inputs.popleft())
def _output(self, modes, s):
v = self._load(s, modes[0])
if self._interactive:
print(v)
else:
self._outputs.append(v)
def _jump_if_true(self, modes, a, d):
if self._load(a, modes[0]) != 0:
self._pc = self._load(d, modes[1])
def _jump_if_false(self, modes, a, d):
if self._load(a, modes[0]) == 0:
self._pc = self._load(d, modes[1])
def _less_than(self, modes, a, b, d):
if self._load(a, modes[0]) < self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _equals(self, modes, a, b, d):
if self._load(a, modes[0]) == self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _adjust_relative_base(self, modes, a):
self._relative_base += self._load(a, modes[0])
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InvalidModeException(Exception):
pass
class Computer:
def __init__(self, data, inputs, memory_size=8192, interactive=True):
self._memory = [0] * memory_size
for i in range(len(data)):
self._memory[i] = data[i]
self._pc = 0
self._inputs = deque(inputs)
self._outputs = []
self._relative_base = 0
self._interactive = interactive
def input(self, value):
self._inputs.append(value)
def _parse_modes(self, instruction):
i = '%.5d' % instruction
return int(i[2]), int(i[1]), int(i[0])
def _fetch(self):
instruction = self._memory[self._pc]
self._pc += 1
if instruction > 100:
return instruction % 100, self._parse_modes(instruction)
else:
return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)
def _pop(self):
v = self._memory[self._pc]
self._pc += 1
return v
def _load(self, a, mode):
if mode == MODE_IMMEDIATE:
return a
elif mode == MODE_POSITION:
return self._memory[a]
elif mode == MODE_RELATIVE:
return self._memory[self._relative_base + a]
else:
raise InvalidModeException()
def _store(self, a, mode, v):
if mode == MODE_IMMEDIATE:
pass
if mode == MODE_POSITION:
self._memory[a] = v
elif mode == MODE_RELATIVE:
self._memory[self._relative_base + a] = v
else:
raise InvalidModeException()
def _add(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,
modes[1]))
def _multiply(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,
modes[1]))
def _input(self, modes, a):
if self._interactive:
self._store(a, modes[0], int(input('=> ')))
else:
self._store(a, modes[0], self._inputs.popleft())
def _output(self, modes, s):
v = self._load(s, modes[0])
if self._interactive:
print(v)
else:
self._outputs.append(v)
def _jump_if_true(self, modes, a, d):
if self._load(a, modes[0]) != 0:
self._pc = self._load(d, modes[1])
def _jump_if_false(self, modes, a, d):
if self._load(a, modes[0]) == 0:
self._pc = self._load(d, modes[1])
def _less_than(self, modes, a, b, d):
if self._load(a, modes[0]) < self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _equals(self, modes, a, b, d):
if self._load(a, modes[0]) == self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _adjust_relative_base(self, modes, a):
self._relative_base += self._load(a, modes[0])
def run(self, debug=False):
while True:
instruction, modes = self._fetch()
if debug:
print(instruction, modes)
if instruction == INS_ADD:
self._add(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_MULTIPLY:
self._multiply(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_INPUT:
self._input(modes, self._pop())
elif instruction == INS_OUTPUT:
v = self._output(modes, self._pop())
if not self._interactive:
return v
elif instruction == INS_JUMP_IF_TRUE:
self._jump_if_true(modes, self._pop(), self._pop())
elif instruction == INS_JUMP_IF_FALSE:
self._jump_if_false(modes, self._pop(), self._pop())
elif instruction == INS_LESS_THAN:
self._less_than(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_EQUALS:
self._equals(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_ADJUST_RELATIVE_BASE:
self._adjust_relative_base(modes, self._pop())
elif instruction == INS_DONE:
return self._outputs
else:
raise InvalidInstructionException(instruction)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InvalidInstructionException(Exception):
def __init__(self, instruction):
super().__init__('<%d>' % instruction)
class InvalidModeException(Exception):
pass
class Computer:
def __init__(self, data, inputs, memory_size=8192, interactive=True):
self._memory = [0] * memory_size
for i in range(len(data)):
self._memory[i] = data[i]
self._pc = 0
self._inputs = deque(inputs)
self._outputs = []
self._relative_base = 0
self._interactive = interactive
def input(self, value):
self._inputs.append(value)
def _parse_modes(self, instruction):
i = '%.5d' % instruction
return int(i[2]), int(i[1]), int(i[0])
def _fetch(self):
instruction = self._memory[self._pc]
self._pc += 1
if instruction > 100:
return instruction % 100, self._parse_modes(instruction)
else:
return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)
def _pop(self):
v = self._memory[self._pc]
self._pc += 1
return v
def _load(self, a, mode):
if mode == MODE_IMMEDIATE:
return a
elif mode == MODE_POSITION:
return self._memory[a]
elif mode == MODE_RELATIVE:
return self._memory[self._relative_base + a]
else:
raise InvalidModeException()
def _store(self, a, mode, v):
if mode == MODE_IMMEDIATE:
pass
if mode == MODE_POSITION:
self._memory[a] = v
elif mode == MODE_RELATIVE:
self._memory[self._relative_base + a] = v
else:
raise InvalidModeException()
def _add(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,
modes[1]))
def _multiply(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,
modes[1]))
def _input(self, modes, a):
if self._interactive:
self._store(a, modes[0], int(input('=> ')))
else:
self._store(a, modes[0], self._inputs.popleft())
def _output(self, modes, s):
v = self._load(s, modes[0])
if self._interactive:
print(v)
else:
self._outputs.append(v)
def _jump_if_true(self, modes, a, d):
if self._load(a, modes[0]) != 0:
self._pc = self._load(d, modes[1])
def _jump_if_false(self, modes, a, d):
if self._load(a, modes[0]) == 0:
self._pc = self._load(d, modes[1])
def _less_than(self, modes, a, b, d):
if self._load(a, modes[0]) < self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _equals(self, modes, a, b, d):
if self._load(a, modes[0]) == self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _adjust_relative_base(self, modes, a):
self._relative_base += self._load(a, modes[0])
def run(self, debug=False):
while True:
instruction, modes = self._fetch()
if debug:
print(instruction, modes)
if instruction == INS_ADD:
self._add(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_MULTIPLY:
self._multiply(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_INPUT:
self._input(modes, self._pop())
elif instruction == INS_OUTPUT:
v = self._output(modes, self._pop())
if not self._interactive:
return v
elif instruction == INS_JUMP_IF_TRUE:
self._jump_if_true(modes, self._pop(), self._pop())
elif instruction == INS_JUMP_IF_FALSE:
self._jump_if_false(modes, self._pop(), self._pop())
elif instruction == INS_LESS_THAN:
self._less_than(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_EQUALS:
self._equals(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_ADJUST_RELATIVE_BASE:
self._adjust_relative_base(modes, self._pop())
elif instruction == INS_DONE:
return self._outputs
else:
raise InvalidInstructionException(instruction)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class InvalidInstructionException(Exception):
def __init__(self, instruction):
super().__init__('<%d>' % instruction)
class InvalidModeException(Exception):
pass
class Computer:
def __init__(self, data, inputs, memory_size=8192, interactive=True):
self._memory = [0] * memory_size
for i in range(len(data)):
self._memory[i] = data[i]
self._pc = 0
self._inputs = deque(inputs)
self._outputs = []
self._relative_base = 0
self._interactive = interactive
def input(self, value):
self._inputs.append(value)
def _parse_modes(self, instruction):
i = '%.5d' % instruction
return int(i[2]), int(i[1]), int(i[0])
def _fetch(self):
instruction = self._memory[self._pc]
self._pc += 1
if instruction > 100:
return instruction % 100, self._parse_modes(instruction)
else:
return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)
def _pop(self):
v = self._memory[self._pc]
self._pc += 1
return v
def _load(self, a, mode):
if mode == MODE_IMMEDIATE:
return a
elif mode == MODE_POSITION:
return self._memory[a]
elif mode == MODE_RELATIVE:
return self._memory[self._relative_base + a]
else:
raise InvalidModeException()
def _store(self, a, mode, v):
if mode == MODE_IMMEDIATE:
pass
if mode == MODE_POSITION:
self._memory[a] = v
elif mode == MODE_RELATIVE:
self._memory[self._relative_base + a] = v
else:
raise InvalidModeException()
def _add(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,
modes[1]))
def _multiply(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,
modes[1]))
def _input(self, modes, a):
if self._interactive:
self._store(a, modes[0], int(input('=> ')))
else:
self._store(a, modes[0], self._inputs.popleft())
def _output(self, modes, s):
v = self._load(s, modes[0])
if self._interactive:
print(v)
else:
self._outputs.append(v)
def _jump_if_true(self, modes, a, d):
if self._load(a, modes[0]) != 0:
self._pc = self._load(d, modes[1])
def _jump_if_false(self, modes, a, d):
if self._load(a, modes[0]) == 0:
self._pc = self._load(d, modes[1])
def _less_than(self, modes, a, b, d):
if self._load(a, modes[0]) < self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _equals(self, modes, a, b, d):
if self._load(a, modes[0]) == self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _adjust_relative_base(self, modes, a):
self._relative_base += self._load(a, modes[0])
def run(self, debug=False):
while True:
instruction, modes = self._fetch()
if debug:
print(instruction, modes)
if instruction == INS_ADD:
self._add(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_MULTIPLY:
self._multiply(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_INPUT:
self._input(modes, self._pop())
elif instruction == INS_OUTPUT:
v = self._output(modes, self._pop())
if not self._interactive:
return v
elif instruction == INS_JUMP_IF_TRUE:
self._jump_if_true(modes, self._pop(), self._pop())
elif instruction == INS_JUMP_IF_FALSE:
self._jump_if_false(modes, self._pop(), self._pop())
elif instruction == INS_LESS_THAN:
self._less_than(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_EQUALS:
self._equals(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_ADJUST_RELATIVE_BASE:
self._adjust_relative_base(modes, self._pop())
elif instruction == INS_DONE:
return self._outputs
else:
raise InvalidInstructionException(instruction)
<|reserved_special_token_0|>
if __name__ == '__main__':
c = Computer(PROGRAM, [])
c.run()
<|reserved_special_token_1|>
#!/usr/bin/env python3
from collections import deque
from itertools import permutations
INS_ADD = 1
INS_MULTIPLY = 2
INS_INPUT = 3
INS_OUTPUT = 4
INS_JUMP_IF_TRUE = 5
INS_JUMP_IF_FALSE = 6
INS_LESS_THAN = 7
INS_EQUALS = 8
INS_ADJUST_RELATIVE_BASE = 9
INS_DONE = 99
MODE_POSITION = 0
MODE_IMMEDIATE = 1
MODE_RELATIVE = 2
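# Parameter modes: position reads memory[a], immediate uses a itself,
# relative reads memory[relative_base + a].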
class InvalidInstructionException (Exception):
def __init__(self, instruction):
super().__init__("<%d>" % instruction)
class InvalidModeException (Exception):
pass
class Computer:
def __init__(self, data, inputs, memory_size=8192, interactive=True):
self._memory = [0] * memory_size
for i in range(len(data)):
self._memory[i] = data[i]
self._pc = 0
self._inputs = deque(inputs)
self._outputs = []
self._relative_base = 0
self._interactive = interactive
def input(self, value):
self._inputs.append(value)
def _parse_modes(self, instruction):
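        # Zero-pad the opcode to five digits; the hundreds, thousands and
        # ten-thousands digits are the modes of parameters 1, 2 and 3.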
i = "%.5d" % instruction
return (int(i[2]), int(i[1]), int(i[0]))
def _fetch(self):
instruction = self._memory[self._pc]
self._pc += 1
if instruction > 100:
return instruction % 100, self._parse_modes(instruction)
else:
return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)
def _pop(self):
v = self._memory[self._pc]
self._pc += 1
return v
def _load(self, a, mode):
if mode == MODE_IMMEDIATE:
return a
elif mode == MODE_POSITION:
return self._memory[a]
elif mode == MODE_RELATIVE:
return self._memory[self._relative_base + a]
else:
raise InvalidModeException()
def _store(self, a, mode, v):
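        # Destination parameters are only written in position or relative mode;
        # an immediate-mode destination falls through to the error branch below.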
if mode == MODE_IMMEDIATE:
pass
if mode == MODE_POSITION:
self._memory[a] = v
elif mode == MODE_RELATIVE:
self._memory[self._relative_base + a] = v
else:
raise InvalidModeException()
def _add(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) + self._load(b, modes[1]))
def _multiply(self, modes, a, b, d):
self._store(d, modes[2], self._load(a, modes[0]) * self._load(b, modes[1]))
def _input(self, modes, a):
if self._interactive:
self._store(a, modes[0], int(input("=> ")))
else:
self._store(a, modes[0], self._inputs.popleft())
def _output(self, modes, s):
v = self._load(s, modes[0])
if self._interactive:
print(v)
else:
self._outputs.append(v)
def _jump_if_true(self, modes, a, d):
if self._load(a, modes[0]) != 0:
self._pc = self._load(d, modes[1])
def _jump_if_false(self, modes, a, d):
if self._load(a, modes[0]) == 0:
self._pc = self._load(d, modes[1])
def _less_than(self, modes, a, b, d):
if self._load(a, modes[0]) < self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _equals(self, modes, a, b, d):
if self._load(a, modes[0]) == self._load(b, modes[1]):
self._store(d, modes[2], 1)
else:
self._store(d, modes[2], 0)
def _adjust_relative_base(self, modes, a):
self._relative_base += self._load(a, modes[0])
def run(self, debug = False):
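        # Fetch-decode-execute loop: dispatch on each opcode until INS_DONE
        # halts the machine; in non-interactive mode the loop also returns
        # right after an output instruction executes.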
while True:
instruction, modes = self._fetch()
if debug:
print(instruction, modes)
if instruction == INS_ADD:
self._add(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_MULTIPLY:
self._multiply(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_INPUT:
self._input(modes, self._pop())
elif instruction == INS_OUTPUT:
v = self._output(modes, self._pop())
if not self._interactive:
return v
elif instruction == INS_JUMP_IF_TRUE:
self._jump_if_true(modes, self._pop(), self._pop())
elif instruction == INS_JUMP_IF_FALSE:
self._jump_if_false(modes, self._pop(), self._pop())
elif instruction == INS_LESS_THAN:
self._less_than(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_EQUALS:
self._equals(modes, self._pop(), self._pop(), self._pop())
elif instruction == INS_ADJUST_RELATIVE_BASE:
self._adjust_relative_base(modes, self._pop())
elif instruction == INS_DONE:
return self._outputs
else:
raise InvalidInstructionException(instruction)
PROGRAM = [1102,34463338,34463338,63,1007,63,34463338,63,1005,63,53,1101,0,3,1000,109,988,209,12,9,1000,209,6,209,3,203,0,1008,1000,1,63,1005,63,65,1008,1000,2,63,1005,63,904,1008,1000,0,63,1005,63,58,4,25,104,0,99,4,0,104,0,99,4,17,104,0,99,0,0,1101,0,396,1029,1101,0,356,1023,1101,401,0,1028,1101,24,0,1008,1101,33,0,1019,1101,35,0,1010,1102,359,1,1022,1102,32,1,1001,1101,37,0,1004,1101,0,31,1009,1101,0,30,1003,1101,28,0,1002,1102,1,36,1014,1102,20,1,1012,1101,21,0,1000,1101,0,22,1015,1102,23,1,1013,1102,1,1,1021,1102,1,39,1007,1102,26,1,1017,1101,0,38,1016,1101,0,437,1024,1102,432,1,1025,1101,0,421,1026,1101,0,29,1005,1101,27,0,1011,1102,1,0,1020,1101,0,25,1018,1101,0,414,1027,1102,34,1,1006,109,6,2108,33,-3,63,1005,63,201,1001,64,1,64,1105,1,203,4,187,1002,64,2,64,109,14,21108,40,40,-6,1005,1014,221,4,209,1105,1,225,1001,64,1,64,1002,64,2,64,109,-21,2102,1,3,63,1008,63,28,63,1005,63,251,4,231,1001,64,1,64,1106,0,251,1002,64,2,64,109,12,2101,0,-3,63,1008,63,21,63,1005,63,275,1001,64,1,64,1105,1,277,4,257,1002,64,2,64,109,-10,1207,1,27,63,1005,63,293,1105,1,299,4,283,1001,64,1,64,1002,64,2,64,109,9,21108,41,42,3,1005,1013,315,1105,1,321,4,305,1001,64,1,64,1002,64,2,64,109,-12,1202,6,1,63,1008,63,37,63,1005,63,347,4,327,1001,64,1,64,1105,1,347,1002,64,2,64,109,29,2105,1,-4,1105,1,365,4,353,1001,64,1,64,1002,64,2,64,109,-17,2108,32,-9,63,1005,63,387,4,371,1001,64,1,64,1105,1,387,1002,64,2,64,109,17,2106,0,1,4,393,1105,1,405,1001,64,1,64,1002,64,2,64,109,1,2106,0,-1,1001,64,1,64,1106,0,423,4,411,1002,64,2,64,109,-13,2105,1,9,4,429,1106,0,441,1001,64,1,64,1002,64,2,64,109,3,21107,42,41,-1,1005,1017,461,1001,64,1,64,1106,0,463,4,447,1002,64,2,64,109,-4,21107,43,44,1,1005,1015,481,4,469,1106,0,485,1001,64,1,64,1002,64,2,64,109,-6,21101,44,0,6,1008,1014,47,63,1005,63,505,1106,0,511,4,491,1001,64,1,64,1002,64,2,64,109,-6,1208,-1,32,63,1005,63,529,4,517,1105,1,533,1001,64,1,64,1002,64,2,64,109,11,1205,7,545,1106,0,551,4,539,1001,64,1,64,1002,64,2,64,109,11,21102,45,1,-7,1008,1017,48,63,1005,63,575,1001,64,1,64,1106,0,577,4,557,1002,64,2,64,109,-8,1206,5,593,1001,64,1,64,1105,1,595,4,583,1002,64,2,64,109,7,1206,-3,609,4,601,1106,0,613,1001,64,1,64,1002,64,2,64,109,-10,2101,0,-6,63,1008,63,39,63,1005,63,635,4,619,1106,0,639,1001,64,1,64,1002,64,2,64,109,-9,1208,0,39,63,1005,63,655,1106,0,661,4,645,1001,64,1,64,1002,64,2,64,109,4,2107,25,0,63,1005,63,681,1001,64,1,64,1105,1,683,4,667,1002,64,2,64,109,-5,2107,31,-2,63,1005,63,701,4,689,1106,0,705,1001,64,1,64,1002,64,2,64,109,19,1205,-1,719,4,711,1105,1,723,1001,64,1,64,1002,64,2,64,109,-17,1201,3,0,63,1008,63,24,63,1005,63,745,4,729,1106,0,749,1001,64,1,64,1002,64,2,64,109,13,21102,46,1,-3,1008,1015,46,63,1005,63,771,4,755,1105,1,775,1001,64,1,64,1002,64,2,64,109,-13,1207,4,32,63,1005,63,793,4,781,1106,0,797,1001,64,1,64,1002,64,2,64,109,7,2102,1,-9,63,1008,63,27,63,1005,63,821,1001,64,1,64,1105,1,823,4,803,1002,64,2,64,109,-18,1201,8,0,63,1008,63,25,63,1005,63,847,1001,64,1,64,1106,0,849,4,829,1002,64,2,64,109,23,21101,47,0,2,1008,1019,47,63,1005,63,871,4,855,1106,0,875,1001,64,1,64,1002,64,2,64,109,-22,1202,5,1,63,1008,63,19,63,1005,63,899,1001,64,1,64,1106,0,901,4,881,4,64,99,21102,27,1,1,21102,1,915,0,1105,1,922,21201,1,25165,1,204,1,99,109,3,1207,-2,3,63,1005,63,964,21201,-2,-1,1,21102,942,1,0,1105,1,922,22102,1,1,-1,21201,-2,-3,1,21101,0,957,0,1105,1,922,22201,1,-1,-2,1106,0,968,21201,-2,0,-2,109,-3,2105,1,0]
if __name__ == "__main__":
c = Computer(PROGRAM, [])
c.run()
|
flexible
|
{
"blob_id": "121fddf022c4eed7fd00e81edcb2df6a7a3b7510",
"index": 4903,
"step-1": "<mask token>\n\n\nclass Computer:\n\n def __init__(self, data, inputs, memory_size=8192, interactive=True):\n self._memory = [0] * memory_size\n for i in range(len(data)):\n self._memory[i] = data[i]\n self._pc = 0\n self._inputs = deque(inputs)\n self._outputs = []\n self._relative_base = 0\n self._interactive = interactive\n\n def input(self, value):\n self._inputs.append(value)\n\n def _parse_modes(self, instruction):\n i = '%.5d' % instruction\n return int(i[2]), int(i[1]), int(i[0])\n\n def _fetch(self):\n instruction = self._memory[self._pc]\n self._pc += 1\n if instruction > 100:\n return instruction % 100, self._parse_modes(instruction)\n else:\n return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)\n\n def _pop(self):\n v = self._memory[self._pc]\n self._pc += 1\n return v\n\n def _load(self, a, mode):\n if mode == MODE_IMMEDIATE:\n return a\n elif mode == MODE_POSITION:\n return self._memory[a]\n elif mode == MODE_RELATIVE:\n return self._memory[self._relative_base + a]\n else:\n raise InvalidModeException()\n\n def _store(self, a, mode, v):\n if mode == MODE_IMMEDIATE:\n pass\n if mode == MODE_POSITION:\n self._memory[a] = v\n elif mode == MODE_RELATIVE:\n self._memory[self._relative_base + a] = v\n else:\n raise InvalidModeException()\n\n def _add(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,\n modes[1]))\n\n def _multiply(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,\n modes[1]))\n\n def _input(self, modes, a):\n if self._interactive:\n self._store(a, modes[0], int(input('=> ')))\n else:\n self._store(a, modes[0], self._inputs.popleft())\n\n def _output(self, modes, s):\n v = self._load(s, modes[0])\n if self._interactive:\n print(v)\n else:\n self._outputs.append(v)\n\n def _jump_if_true(self, modes, a, d):\n if self._load(a, modes[0]) != 0:\n self._pc = self._load(d, modes[1])\n\n def _jump_if_false(self, modes, a, d):\n if self._load(a, modes[0]) == 0:\n self._pc = self._load(d, modes[1])\n\n def _less_than(self, modes, a, b, d):\n if self._load(a, modes[0]) < self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _equals(self, modes, a, b, d):\n if self._load(a, modes[0]) == self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _adjust_relative_base(self, modes, a):\n self._relative_base += self._load(a, modes[0])\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass InvalidModeException(Exception):\n pass\n\n\nclass Computer:\n\n def __init__(self, data, inputs, memory_size=8192, interactive=True):\n self._memory = [0] * memory_size\n for i in range(len(data)):\n self._memory[i] = data[i]\n self._pc = 0\n self._inputs = deque(inputs)\n self._outputs = []\n self._relative_base = 0\n self._interactive = interactive\n\n def input(self, value):\n self._inputs.append(value)\n\n def _parse_modes(self, instruction):\n i = '%.5d' % instruction\n return int(i[2]), int(i[1]), int(i[0])\n\n def _fetch(self):\n instruction = self._memory[self._pc]\n self._pc += 1\n if instruction > 100:\n return instruction % 100, self._parse_modes(instruction)\n else:\n return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)\n\n def _pop(self):\n v = self._memory[self._pc]\n self._pc += 1\n return v\n\n def _load(self, a, mode):\n if mode == MODE_IMMEDIATE:\n return a\n elif mode == MODE_POSITION:\n return self._memory[a]\n elif mode == MODE_RELATIVE:\n return self._memory[self._relative_base + a]\n else:\n raise InvalidModeException()\n\n def _store(self, a, mode, v):\n if mode == MODE_IMMEDIATE:\n pass\n if mode == MODE_POSITION:\n self._memory[a] = v\n elif mode == MODE_RELATIVE:\n self._memory[self._relative_base + a] = v\n else:\n raise InvalidModeException()\n\n def _add(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,\n modes[1]))\n\n def _multiply(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,\n modes[1]))\n\n def _input(self, modes, a):\n if self._interactive:\n self._store(a, modes[0], int(input('=> ')))\n else:\n self._store(a, modes[0], self._inputs.popleft())\n\n def _output(self, modes, s):\n v = self._load(s, modes[0])\n if self._interactive:\n print(v)\n else:\n self._outputs.append(v)\n\n def _jump_if_true(self, modes, a, d):\n if self._load(a, modes[0]) != 0:\n self._pc = self._load(d, modes[1])\n\n def _jump_if_false(self, modes, a, d):\n if self._load(a, modes[0]) == 0:\n self._pc = self._load(d, modes[1])\n\n def _less_than(self, modes, a, b, d):\n if self._load(a, modes[0]) < self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _equals(self, modes, a, b, d):\n if self._load(a, modes[0]) == self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _adjust_relative_base(self, modes, a):\n self._relative_base += self._load(a, modes[0])\n\n def run(self, debug=False):\n while True:\n instruction, modes = self._fetch()\n if debug:\n print(instruction, modes)\n if instruction == INS_ADD:\n self._add(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_MULTIPLY:\n self._multiply(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_INPUT:\n self._input(modes, self._pop())\n elif instruction == INS_OUTPUT:\n v = self._output(modes, self._pop())\n if not self._interactive:\n return v\n elif instruction == INS_JUMP_IF_TRUE:\n self._jump_if_true(modes, self._pop(), self._pop())\n elif instruction == INS_JUMP_IF_FALSE:\n self._jump_if_false(modes, self._pop(), self._pop())\n elif instruction == INS_LESS_THAN:\n self._less_than(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_EQUALS:\n self._equals(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_ADJUST_RELATIVE_BASE:\n self._adjust_relative_base(modes, self._pop())\n elif instruction == INS_DONE:\n return self._outputs\n 
else:\n raise InvalidInstructionException(instruction)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass InvalidInstructionException(Exception):\n\n def __init__(self, instruction):\n super().__init__('<%d>' % instruction)\n\n\nclass InvalidModeException(Exception):\n pass\n\n\nclass Computer:\n\n def __init__(self, data, inputs, memory_size=8192, interactive=True):\n self._memory = [0] * memory_size\n for i in range(len(data)):\n self._memory[i] = data[i]\n self._pc = 0\n self._inputs = deque(inputs)\n self._outputs = []\n self._relative_base = 0\n self._interactive = interactive\n\n def input(self, value):\n self._inputs.append(value)\n\n def _parse_modes(self, instruction):\n i = '%.5d' % instruction\n return int(i[2]), int(i[1]), int(i[0])\n\n def _fetch(self):\n instruction = self._memory[self._pc]\n self._pc += 1\n if instruction > 100:\n return instruction % 100, self._parse_modes(instruction)\n else:\n return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)\n\n def _pop(self):\n v = self._memory[self._pc]\n self._pc += 1\n return v\n\n def _load(self, a, mode):\n if mode == MODE_IMMEDIATE:\n return a\n elif mode == MODE_POSITION:\n return self._memory[a]\n elif mode == MODE_RELATIVE:\n return self._memory[self._relative_base + a]\n else:\n raise InvalidModeException()\n\n def _store(self, a, mode, v):\n if mode == MODE_IMMEDIATE:\n pass\n if mode == MODE_POSITION:\n self._memory[a] = v\n elif mode == MODE_RELATIVE:\n self._memory[self._relative_base + a] = v\n else:\n raise InvalidModeException()\n\n def _add(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,\n modes[1]))\n\n def _multiply(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,\n modes[1]))\n\n def _input(self, modes, a):\n if self._interactive:\n self._store(a, modes[0], int(input('=> ')))\n else:\n self._store(a, modes[0], self._inputs.popleft())\n\n def _output(self, modes, s):\n v = self._load(s, modes[0])\n if self._interactive:\n print(v)\n else:\n self._outputs.append(v)\n\n def _jump_if_true(self, modes, a, d):\n if self._load(a, modes[0]) != 0:\n self._pc = self._load(d, modes[1])\n\n def _jump_if_false(self, modes, a, d):\n if self._load(a, modes[0]) == 0:\n self._pc = self._load(d, modes[1])\n\n def _less_than(self, modes, a, b, d):\n if self._load(a, modes[0]) < self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _equals(self, modes, a, b, d):\n if self._load(a, modes[0]) == self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _adjust_relative_base(self, modes, a):\n self._relative_base += self._load(a, modes[0])\n\n def run(self, debug=False):\n while True:\n instruction, modes = self._fetch()\n if debug:\n print(instruction, modes)\n if instruction == INS_ADD:\n self._add(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_MULTIPLY:\n self._multiply(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_INPUT:\n self._input(modes, self._pop())\n elif instruction == INS_OUTPUT:\n v = self._output(modes, self._pop())\n if not self._interactive:\n return v\n elif instruction == INS_JUMP_IF_TRUE:\n self._jump_if_true(modes, self._pop(), self._pop())\n elif instruction == INS_JUMP_IF_FALSE:\n self._jump_if_false(modes, self._pop(), self._pop())\n elif instruction == INS_LESS_THAN:\n self._less_than(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_EQUALS:\n self._equals(modes, self._pop(), self._pop(), self._pop())\n elif instruction == 
INS_ADJUST_RELATIVE_BASE:\n self._adjust_relative_base(modes, self._pop())\n elif instruction == INS_DONE:\n return self._outputs\n else:\n raise InvalidInstructionException(instruction)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass InvalidInstructionException(Exception):\n\n def __init__(self, instruction):\n super().__init__('<%d>' % instruction)\n\n\nclass InvalidModeException(Exception):\n pass\n\n\nclass Computer:\n\n def __init__(self, data, inputs, memory_size=8192, interactive=True):\n self._memory = [0] * memory_size\n for i in range(len(data)):\n self._memory[i] = data[i]\n self._pc = 0\n self._inputs = deque(inputs)\n self._outputs = []\n self._relative_base = 0\n self._interactive = interactive\n\n def input(self, value):\n self._inputs.append(value)\n\n def _parse_modes(self, instruction):\n i = '%.5d' % instruction\n return int(i[2]), int(i[1]), int(i[0])\n\n def _fetch(self):\n instruction = self._memory[self._pc]\n self._pc += 1\n if instruction > 100:\n return instruction % 100, self._parse_modes(instruction)\n else:\n return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)\n\n def _pop(self):\n v = self._memory[self._pc]\n self._pc += 1\n return v\n\n def _load(self, a, mode):\n if mode == MODE_IMMEDIATE:\n return a\n elif mode == MODE_POSITION:\n return self._memory[a]\n elif mode == MODE_RELATIVE:\n return self._memory[self._relative_base + a]\n else:\n raise InvalidModeException()\n\n def _store(self, a, mode, v):\n if mode == MODE_IMMEDIATE:\n pass\n if mode == MODE_POSITION:\n self._memory[a] = v\n elif mode == MODE_RELATIVE:\n self._memory[self._relative_base + a] = v\n else:\n raise InvalidModeException()\n\n def _add(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) + self._load(b,\n modes[1]))\n\n def _multiply(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) * self._load(b,\n modes[1]))\n\n def _input(self, modes, a):\n if self._interactive:\n self._store(a, modes[0], int(input('=> ')))\n else:\n self._store(a, modes[0], self._inputs.popleft())\n\n def _output(self, modes, s):\n v = self._load(s, modes[0])\n if self._interactive:\n print(v)\n else:\n self._outputs.append(v)\n\n def _jump_if_true(self, modes, a, d):\n if self._load(a, modes[0]) != 0:\n self._pc = self._load(d, modes[1])\n\n def _jump_if_false(self, modes, a, d):\n if self._load(a, modes[0]) == 0:\n self._pc = self._load(d, modes[1])\n\n def _less_than(self, modes, a, b, d):\n if self._load(a, modes[0]) < self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _equals(self, modes, a, b, d):\n if self._load(a, modes[0]) == self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _adjust_relative_base(self, modes, a):\n self._relative_base += self._load(a, modes[0])\n\n def run(self, debug=False):\n while True:\n instruction, modes = self._fetch()\n if debug:\n print(instruction, modes)\n if instruction == INS_ADD:\n self._add(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_MULTIPLY:\n self._multiply(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_INPUT:\n self._input(modes, self._pop())\n elif instruction == INS_OUTPUT:\n v = self._output(modes, self._pop())\n if not self._interactive:\n return v\n elif instruction == INS_JUMP_IF_TRUE:\n self._jump_if_true(modes, self._pop(), self._pop())\n elif instruction == INS_JUMP_IF_FALSE:\n self._jump_if_false(modes, self._pop(), self._pop())\n elif instruction == INS_LESS_THAN:\n self._less_than(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_EQUALS:\n self._equals(modes, self._pop(), self._pop(), self._pop())\n elif instruction == 
INS_ADJUST_RELATIVE_BASE:\n self._adjust_relative_base(modes, self._pop())\n elif instruction == INS_DONE:\n return self._outputs\n else:\n raise InvalidInstructionException(instruction)\n\n\n<mask token>\nif __name__ == '__main__':\n c = Computer(PROGRAM, [])\n c.run()\n",
"step-5": "#!/usr/bin/env python3\n\n\nfrom collections import deque\nfrom itertools import permutations\n\n\nINS_ADD = 1\nINS_MULTIPLY = 2\nINS_INPUT = 3\nINS_OUTPUT = 4\nINS_JUMP_IF_TRUE = 5\nINS_JUMP_IF_FALSE = 6\nINS_LESS_THAN = 7\nINS_EQUALS = 8\nINS_ADJUST_RELATIVE_BASE = 9\nINS_DONE = 99\n\nMODE_POSITION = 0\nMODE_IMMEDIATE = 1\nMODE_RELATIVE = 2\n\n\nclass InvalidInstructionException (Exception):\n def __init__(self, instruction):\n super().__init__(\"<%d>\" % instruction)\n\n\nclass InvalidModeException (Exception):\n pass\n\n\nclass Computer:\n\n def __init__(self, data, inputs, memory_size=8192, interactive=True):\n self._memory = [0] * memory_size\n for i in range(len(data)):\n self._memory[i] = data[i]\n self._pc = 0\n self._inputs = deque(inputs)\n self._outputs = []\n self._relative_base = 0\n self._interactive = interactive\n\n def input(self, value):\n self._inputs.append(value)\n \n def _parse_modes(self, instruction):\n i = \"%.5d\" % instruction\n return (int(i[2]), int(i[1]), int(i[0]))\n \n def _fetch(self):\n instruction = self._memory[self._pc]\n self._pc += 1\n if instruction > 100:\n return instruction % 100, self._parse_modes(instruction)\n else:\n return instruction, (MODE_POSITION, MODE_POSITION, MODE_POSITION)\n\n def _pop(self):\n v = self._memory[self._pc]\n self._pc += 1\n return v\n \n def _load(self, a, mode):\n if mode == MODE_IMMEDIATE:\n return a\n elif mode == MODE_POSITION:\n return self._memory[a]\n elif mode == MODE_RELATIVE:\n return self._memory[self._relative_base + a]\n else:\n raise InvalidModeException()\n\n def _store(self, a, mode, v):\n if mode == MODE_IMMEDIATE:\n pass\n if mode == MODE_POSITION:\n self._memory[a] = v\n elif mode == MODE_RELATIVE:\n self._memory[self._relative_base + a] = v\n else:\n raise InvalidModeException()\n \n def _add(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) + self._load(b, modes[1]))\n \n def _multiply(self, modes, a, b, d):\n self._store(d, modes[2], self._load(a, modes[0]) * self._load(b, modes[1]))\n\n def _input(self, modes, a):\n if self._interactive:\n self._store(a, modes[0], int(input(\"=> \")))\n else:\n self._store(a, modes[0], self._inputs.popleft())\n\n def _output(self, modes, s):\n v = self._load(s, modes[0])\n if self._interactive:\n print(v)\n else:\n self._outputs.append(v)\n \n def _jump_if_true(self, modes, a, d):\n if self._load(a, modes[0]) != 0:\n self._pc = self._load(d, modes[1])\n\n def _jump_if_false(self, modes, a, d):\n if self._load(a, modes[0]) == 0:\n self._pc = self._load(d, modes[1])\n\n def _less_than(self, modes, a, b, d):\n if self._load(a, modes[0]) < self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _equals(self, modes, a, b, d):\n if self._load(a, modes[0]) == self._load(b, modes[1]):\n self._store(d, modes[2], 1)\n else:\n self._store(d, modes[2], 0)\n\n def _adjust_relative_base(self, modes, a):\n self._relative_base += self._load(a, modes[0])\n \n def run(self, debug = False):\n while True:\n instruction, modes = self._fetch()\n if debug:\n print(instruction, modes)\n if instruction == INS_ADD:\n self._add(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_MULTIPLY:\n self._multiply(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_INPUT:\n self._input(modes, self._pop())\n elif instruction == INS_OUTPUT:\n v = self._output(modes, self._pop())\n if not self._interactive:\n return v\n elif instruction == INS_JUMP_IF_TRUE:\n 
self._jump_if_true(modes, self._pop(), self._pop())\n elif instruction == INS_JUMP_IF_FALSE:\n self._jump_if_false(modes, self._pop(), self._pop())\n elif instruction == INS_LESS_THAN:\n self._less_than(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_EQUALS:\n self._equals(modes, self._pop(), self._pop(), self._pop())\n elif instruction == INS_ADJUST_RELATIVE_BASE:\n self._adjust_relative_base(modes, self._pop())\n elif instruction == INS_DONE:\n return self._outputs\n else:\n raise InvalidInstructionException(instruction)\n\n\nPROGRAM = [1102,34463338,34463338,63,1007,63,34463338,63,1005,63,53,1101,0,3,1000,109,988,209,12,9,1000,209,6,209,3,203,0,1008,1000,1,63,1005,63,65,1008,1000,2,63,1005,63,904,1008,1000,0,63,1005,63,58,4,25,104,0,99,4,0,104,0,99,4,17,104,0,99,0,0,1101,0,396,1029,1101,0,356,1023,1101,401,0,1028,1101,24,0,1008,1101,33,0,1019,1101,35,0,1010,1102,359,1,1022,1102,32,1,1001,1101,37,0,1004,1101,0,31,1009,1101,0,30,1003,1101,28,0,1002,1102,1,36,1014,1102,20,1,1012,1101,21,0,1000,1101,0,22,1015,1102,23,1,1013,1102,1,1,1021,1102,1,39,1007,1102,26,1,1017,1101,0,38,1016,1101,0,437,1024,1102,432,1,1025,1101,0,421,1026,1101,0,29,1005,1101,27,0,1011,1102,1,0,1020,1101,0,25,1018,1101,0,414,1027,1102,34,1,1006,109,6,2108,33,-3,63,1005,63,201,1001,64,1,64,1105,1,203,4,187,1002,64,2,64,109,14,21108,40,40,-6,1005,1014,221,4,209,1105,1,225,1001,64,1,64,1002,64,2,64,109,-21,2102,1,3,63,1008,63,28,63,1005,63,251,4,231,1001,64,1,64,1106,0,251,1002,64,2,64,109,12,2101,0,-3,63,1008,63,21,63,1005,63,275,1001,64,1,64,1105,1,277,4,257,1002,64,2,64,109,-10,1207,1,27,63,1005,63,293,1105,1,299,4,283,1001,64,1,64,1002,64,2,64,109,9,21108,41,42,3,1005,1013,315,1105,1,321,4,305,1001,64,1,64,1002,64,2,64,109,-12,1202,6,1,63,1008,63,37,63,1005,63,347,4,327,1001,64,1,64,1105,1,347,1002,64,2,64,109,29,2105,1,-4,1105,1,365,4,353,1001,64,1,64,1002,64,2,64,109,-17,2108,32,-9,63,1005,63,387,4,371,1001,64,1,64,1105,1,387,1002,64,2,64,109,17,2106,0,1,4,393,1105,1,405,1001,64,1,64,1002,64,2,64,109,1,2106,0,-1,1001,64,1,64,1106,0,423,4,411,1002,64,2,64,109,-13,2105,1,9,4,429,1106,0,441,1001,64,1,64,1002,64,2,64,109,3,21107,42,41,-1,1005,1017,461,1001,64,1,64,1106,0,463,4,447,1002,64,2,64,109,-4,21107,43,44,1,1005,1015,481,4,469,1106,0,485,1001,64,1,64,1002,64,2,64,109,-6,21101,44,0,6,1008,1014,47,63,1005,63,505,1106,0,511,4,491,1001,64,1,64,1002,64,2,64,109,-6,1208,-1,32,63,1005,63,529,4,517,1105,1,533,1001,64,1,64,1002,64,2,64,109,11,1205,7,545,1106,0,551,4,539,1001,64,1,64,1002,64,2,64,109,11,21102,45,1,-7,1008,1017,48,63,1005,63,575,1001,64,1,64,1106,0,577,4,557,1002,64,2,64,109,-8,1206,5,593,1001,64,1,64,1105,1,595,4,583,1002,64,2,64,109,7,1206,-3,609,4,601,1106,0,613,1001,64,1,64,1002,64,2,64,109,-10,2101,0,-6,63,1008,63,39,63,1005,63,635,4,619,1106,0,639,1001,64,1,64,1002,64,2,64,109,-9,1208,0,39,63,1005,63,655,1106,0,661,4,645,1001,64,1,64,1002,64,2,64,109,4,2107,25,0,63,1005,63,681,1001,64,1,64,1105,1,683,4,667,1002,64,2,64,109,-5,2107,31,-2,63,1005,63,701,4,689,1106,0,705,1001,64,1,64,1002,64,2,64,109,19,1205,-1,719,4,711,1105,1,723,1001,64,1,64,1002,64,2,64,109,-17,1201,3,0,63,1008,63,24,63,1005,63,745,4,729,1106,0,749,1001,64,1,64,1002,64,2,64,109,13,21102,46,1,-3,1008,1015,46,63,1005,63,771,4,755,1105,1,775,1001,64,1,64,1002,64,2,64,109,-13,1207,4,32,63,1005,63,793,4,781,1106,0,797,1001,64,1,64,1002,64,2,64,109,7,2102,1,-9,63,1008,63,27,63,1005,63,821,1001,64,1,64,1105,1,823,4,803,1002,64,2,64,109,-18,1201,8,0,63,1008,63,25,63,1005,63,847,1001,64,1,64,1106,0,849,4,829,1002,64,2,
64,109,23,21101,47,0,2,1008,1019,47,63,1005,63,871,4,855,1106,0,875,1001,64,1,64,1002,64,2,64,109,-22,1202,5,1,63,1008,63,19,63,1005,63,899,1001,64,1,64,1106,0,901,4,881,4,64,99,21102,27,1,1,21102,1,915,0,1105,1,922,21201,1,25165,1,204,1,99,109,3,1207,-2,3,63,1005,63,964,21201,-2,-1,1,21102,942,1,0,1105,1,922,22102,1,1,-1,21201,-2,-3,1,21101,0,957,0,1105,1,922,22201,1,-1,-2,1106,0,968,21201,-2,0,-2,109,-3,2105,1,0]\n\n\nif __name__ == \"__main__\":\n c = Computer(PROGRAM, [])\n c.run()\n",
"step-ids": [
17,
19,
21,
22,
25
]
}
|
[
17,
19,
21,
22,
25
] |
<|reserved_special_token_0|>
class LdapSync(Thread):
def __init__(self, settings):
Thread.__init__(self)
self.settings = settings
def run(self):
if self.settings.enable_group_sync:
migrate_dn_pairs(settings=self.settings)
self.start_sync()
self.show_sync_result()
def show_sync_result(self):
pass
def start_sync(self):
data_ldap = self.get_data_from_ldap()
if data_ldap is None:
return
data_db = self.get_data_from_db()
if data_db is None:
return
self.sync_data(data_db, data_ldap)
def get_data_from_db(self):
return None
def get_data_from_ldap(self):
ret = {}
for config in self.settings.ldap_configs:
cur_ret = self.get_data_from_ldap_by_server(config)
if cur_ret is None:
return None
for key in cur_ret.keys():
if key not in ret:
ret[key] = cur_ret[key]
ret[key].config = config
return ret
def get_data_from_ldap_by_server(self, config):
return None
def sync_data(self, data_db, data_ldap):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def migrate_dn_pairs(settings):
grp_dn_pairs = get_group_dn_pairs()
if grp_dn_pairs is None:
        logger.warning(
            'get group dn pairs from db failed when migrating dn pairs.')
return
grp_dn_pairs.reverse()
for grp_dn_pair in grp_dn_pairs:
for config in settings.ldap_configs:
search_filter = '(objectClass=*)'
ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,
config.follow_referrals)
ldap_conn.create_conn()
if not ldap_conn.conn:
logger.warning('connect ldap server [%s] failed.' % config.
user_dn)
return
if config.use_page_result:
results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,
search_filter, [config.group_uuid_attr])
else:
results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,
search_filter, [config.group_uuid_attr])
ldap_conn.unbind_conn()
results = bytes2str(results)
if not results:
continue
else:
uuid = results[0][1][config.group_uuid_attr][0]
add_group_uuid_pair(grp_dn_pair.group_id, uuid)
class LdapSync(Thread):
def __init__(self, settings):
Thread.__init__(self)
self.settings = settings
def run(self):
if self.settings.enable_group_sync:
migrate_dn_pairs(settings=self.settings)
self.start_sync()
self.show_sync_result()
def show_sync_result(self):
pass
def start_sync(self):
data_ldap = self.get_data_from_ldap()
if data_ldap is None:
return
data_db = self.get_data_from_db()
if data_db is None:
return
self.sync_data(data_db, data_ldap)
def get_data_from_db(self):
return None
def get_data_from_ldap(self):
ret = {}
for config in self.settings.ldap_configs:
cur_ret = self.get_data_from_ldap_by_server(config)
if cur_ret is None:
return None
for key in cur_ret.keys():
if key not in ret:
ret[key] = cur_ret[key]
ret[key].config = config
return ret
def get_data_from_ldap_by_server(self, config):
return None
def sync_data(self, data_db, data_ldap):
pass
<|reserved_special_token_1|>
<|reserved_special_token_0|>
logger = logging.getLogger(__name__)
def migrate_dn_pairs(settings):
grp_dn_pairs = get_group_dn_pairs()
if grp_dn_pairs is None:
        logger.warning(
            'get group dn pairs from db failed when migrating dn pairs.')
return
grp_dn_pairs.reverse()
for grp_dn_pair in grp_dn_pairs:
for config in settings.ldap_configs:
search_filter = '(objectClass=*)'
ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,
config.follow_referrals)
ldap_conn.create_conn()
if not ldap_conn.conn:
logger.warning('connect ldap server [%s] failed.' % config.
user_dn)
return
if config.use_page_result:
results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,
search_filter, [config.group_uuid_attr])
else:
results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,
search_filter, [config.group_uuid_attr])
ldap_conn.unbind_conn()
results = bytes2str(results)
if not results:
continue
else:
uuid = results[0][1][config.group_uuid_attr][0]
add_group_uuid_pair(grp_dn_pair.group_id, uuid)
class LdapSync(Thread):
def __init__(self, settings):
Thread.__init__(self)
self.settings = settings
def run(self):
if self.settings.enable_group_sync:
migrate_dn_pairs(settings=self.settings)
self.start_sync()
self.show_sync_result()
def show_sync_result(self):
pass
def start_sync(self):
data_ldap = self.get_data_from_ldap()
if data_ldap is None:
return
data_db = self.get_data_from_db()
if data_db is None:
return
self.sync_data(data_db, data_ldap)
def get_data_from_db(self):
return None
def get_data_from_ldap(self):
ret = {}
for config in self.settings.ldap_configs:
cur_ret = self.get_data_from_ldap_by_server(config)
if cur_ret is None:
return None
for key in cur_ret.keys():
if key not in ret:
ret[key] = cur_ret[key]
ret[key].config = config
return ret
def get_data_from_ldap_by_server(self, config):
return None
def sync_data(self, data_db, data_ldap):
pass
<|reserved_special_token_1|>
import logging
from threading import Thread
from ldap import SCOPE_BASE
from seafevents.ldap_syncer.ldap_conn import LdapConn
from seafevents.ldap_syncer.utils import bytes2str, add_group_uuid_pair
from seaserv import get_group_dn_pairs
logger = logging.getLogger(__name__)
def migrate_dn_pairs(settings):
grp_dn_pairs = get_group_dn_pairs()
if grp_dn_pairs is None:
        logger.warning(
            'get group dn pairs from db failed when migrating dn pairs.')
return
grp_dn_pairs.reverse()
for grp_dn_pair in grp_dn_pairs:
for config in settings.ldap_configs:
search_filter = '(objectClass=*)'
ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,
config.follow_referrals)
ldap_conn.create_conn()
if not ldap_conn.conn:
logger.warning('connect ldap server [%s] failed.' % config.
user_dn)
return
if config.use_page_result:
results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,
search_filter, [config.group_uuid_attr])
else:
results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,
search_filter, [config.group_uuid_attr])
ldap_conn.unbind_conn()
results = bytes2str(results)
if not results:
continue
else:
uuid = results[0][1][config.group_uuid_attr][0]
add_group_uuid_pair(grp_dn_pair.group_id, uuid)
class LdapSync(Thread):
def __init__(self, settings):
Thread.__init__(self)
self.settings = settings
def run(self):
if self.settings.enable_group_sync:
migrate_dn_pairs(settings=self.settings)
self.start_sync()
self.show_sync_result()
def show_sync_result(self):
pass
def start_sync(self):
data_ldap = self.get_data_from_ldap()
if data_ldap is None:
return
data_db = self.get_data_from_db()
if data_db is None:
return
self.sync_data(data_db, data_ldap)
def get_data_from_db(self):
return None
def get_data_from_ldap(self):
ret = {}
for config in self.settings.ldap_configs:
cur_ret = self.get_data_from_ldap_by_server(config)
if cur_ret is None:
return None
for key in cur_ret.keys():
if key not in ret:
ret[key] = cur_ret[key]
ret[key].config = config
return ret
def get_data_from_ldap_by_server(self, config):
return None
def sync_data(self, data_db, data_ldap):
pass
<|reserved_special_token_1|>
#coding: utf-8
import logging
from threading import Thread
from ldap import SCOPE_BASE
from seafevents.ldap_syncer.ldap_conn import LdapConn
from seafevents.ldap_syncer.utils import bytes2str, add_group_uuid_pair
from seaserv import get_group_dn_pairs
logger = logging.getLogger(__name__)
def migrate_dn_pairs(settings):
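    # Back-fill group uuid pairs: for every stored group<->DN pair, look the DN
    # up on each configured LDAP server and record the group's uuid attribute
    # via add_group_uuid_pair.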
grp_dn_pairs = get_group_dn_pairs()
if grp_dn_pairs is None:
        logger.warning('get group dn pairs from db failed when migrating dn pairs.')
return
grp_dn_pairs.reverse()
for grp_dn_pair in grp_dn_pairs:
for config in settings.ldap_configs:
search_filter = '(objectClass=*)'
ldap_conn = LdapConn(config.host, config.user_dn, config.passwd, config.follow_referrals)
ldap_conn.create_conn()
if not ldap_conn.conn:
logger.warning('connect ldap server [%s] failed.' % config.user_dn)
return
if config.use_page_result:
results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,
search_filter,
[config.group_uuid_attr])
else:
results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,
search_filter,
[config.group_uuid_attr])
ldap_conn.unbind_conn()
results = bytes2str(results)
if not results:
continue
else:
uuid = results[0][1][config.group_uuid_attr][0]
add_group_uuid_pair(grp_dn_pair.group_id, uuid)
class LdapSync(Thread):
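    # Thread-based sync skeleton: get_data_from_db, get_data_from_ldap_by_server
    # and sync_data are stubs here and are expected to be overridden by concrete
    # sync subclasses.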
def __init__(self, settings):
Thread.__init__(self)
self.settings = settings
def run(self):
if self.settings.enable_group_sync:
migrate_dn_pairs(settings=self.settings)
self.start_sync()
self.show_sync_result()
def show_sync_result(self):
pass
def start_sync(self):
data_ldap = self.get_data_from_ldap()
if data_ldap is None:
return
data_db = self.get_data_from_db()
if data_db is None:
return
self.sync_data(data_db, data_ldap)
def get_data_from_db(self):
return None
def get_data_from_ldap(self):
ret = {}
for config in self.settings.ldap_configs:
cur_ret = self.get_data_from_ldap_by_server(config)
# If get data from one server failed, then the result is failed
if cur_ret is None:
return None
for key in cur_ret.keys():
if key not in ret:
ret[key] = cur_ret[key]
ret[key].config = config
return ret
def get_data_from_ldap_by_server(self, config):
return None
def sync_data(self, data_db, data_ldap):
pass
|
flexible
|
{
"blob_id": "8cc0393082448bb8f61068b5c96e89ef3aee77ed",
"index": 235,
"step-1": "<mask token>\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n",
"step-2": "<mask token>\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning(\n 'get group dn pairs from db failed when migrate dn pairs.')\n return\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,\n config.follow_referrals)\n ldap_conn.create_conn()\n if not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' % config.\n user_dn)\n return\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n",
"step-3": "<mask token>\nlogger = logging.getLogger(__name__)\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning(\n 'get group dn pairs from db failed when migrate dn pairs.')\n return\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,\n config.follow_referrals)\n ldap_conn.create_conn()\n if not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' % config.\n user_dn)\n return\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n",
"step-4": "import logging\nfrom threading import Thread\nfrom ldap import SCOPE_BASE\nfrom seafevents.ldap_syncer.ldap_conn import LdapConn\nfrom seafevents.ldap_syncer.utils import bytes2str, add_group_uuid_pair\nfrom seaserv import get_group_dn_pairs\nlogger = logging.getLogger(__name__)\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning(\n 'get group dn pairs from db failed when migrate dn pairs.')\n return\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd,\n config.follow_referrals)\n ldap_conn.create_conn()\n if not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' % config.\n user_dn)\n return\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter, [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n",
"step-5": "#coding: utf-8\nimport logging\nfrom threading import Thread\n\nfrom ldap import SCOPE_BASE\nfrom seafevents.ldap_syncer.ldap_conn import LdapConn\nfrom seafevents.ldap_syncer.utils import bytes2str, add_group_uuid_pair\n\nfrom seaserv import get_group_dn_pairs\n\n\nlogger = logging.getLogger(__name__)\n\n\ndef migrate_dn_pairs(settings):\n grp_dn_pairs = get_group_dn_pairs()\n if grp_dn_pairs is None:\n logger.warning('get group dn pairs from db failed when migrate dn pairs.')\n return\n\n grp_dn_pairs.reverse()\n for grp_dn_pair in grp_dn_pairs:\n for config in settings.ldap_configs:\n search_filter = '(objectClass=*)'\n ldap_conn = LdapConn(config.host, config.user_dn, config.passwd, config.follow_referrals)\n ldap_conn.create_conn()\n if not ldap_conn.conn:\n logger.warning('connect ldap server [%s] failed.' % config.user_dn)\n return\n\n if config.use_page_result:\n results = ldap_conn.paged_search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter,\n [config.group_uuid_attr])\n else:\n results = ldap_conn.search(grp_dn_pair.dn, SCOPE_BASE,\n search_filter,\n [config.group_uuid_attr])\n ldap_conn.unbind_conn()\n results = bytes2str(results)\n\n if not results:\n continue\n else:\n uuid = results[0][1][config.group_uuid_attr][0]\n add_group_uuid_pair(grp_dn_pair.group_id, uuid)\n\n\nclass LdapSync(Thread):\n def __init__(self, settings):\n Thread.__init__(self)\n self.settings = settings\n\n def run(self):\n if self.settings.enable_group_sync:\n migrate_dn_pairs(settings=self.settings)\n self.start_sync()\n self.show_sync_result()\n\n def show_sync_result(self):\n pass\n\n def start_sync(self):\n data_ldap = self.get_data_from_ldap()\n if data_ldap is None:\n return\n\n data_db = self.get_data_from_db()\n if data_db is None:\n return\n\n self.sync_data(data_db, data_ldap)\n\n def get_data_from_db(self):\n return None\n\n def get_data_from_ldap(self):\n ret = {}\n\n for config in self.settings.ldap_configs:\n cur_ret = self.get_data_from_ldap_by_server(config)\n # If get data from one server failed, then the result is failed\n if cur_ret is None:\n return None\n for key in cur_ret.keys():\n if key not in ret:\n ret[key] = cur_ret[key]\n ret[key].config = config\n\n return ret\n\n def get_data_from_ldap_by_server(self, config):\n return None\n\n def sync_data(self, data_db, data_ldap):\n pass\n",
"step-ids": [
9,
10,
11,
12,
13
]
}
|
[
9,
10,
11,
12,
13
] |
'''
Given a string S and a string T,
find the minimum window in S which will contain all the characters in T in complexity O(n).
For example,
S = "ADOBECODEBANC"
T = "ABC"
Minimum window is "BANC".
Note:
If there is no such window in S that covers all characters in T, return the empty string "".
If there are multiple such windows,
you are guaranteed that there will always be only one unique minimum window in S.
'''
from collections import defaultdict
class Solution:
"""
@param: source : A string
@param: target: A string
@return: A string denote the minimum window, return "" if there is no such a string
"""
def minWindow(self, source, target):
# create a hashmap/dictionary for target, {key: value = char: count}
s_char_count = defaultdict(int)
t_char_count = defaultdict(int)
for char in target:
t_char_count[char] += 1
j = 0
min_substr = ''
min_length = float('inf')
for i in range(len(source)):
while j < len(source) and not self.is_contain(s_char_count, t_char_count):
s_char_count[source[j]] += 1
j += 1
if self.is_contain(s_char_count, t_char_count):
if min_length > j - i:
min_length = j - i
min_substr = source[i:j]
s_char_count[source[i]] -= 1
return min_substr
def is_contain(self, s_char_count, t_char_count):
for char in t_char_count:
if char not in s_char_count or s_char_count[char] < t_char_count[char]:
return False
return True
|
normal
|
{
"blob_id": "665a868ee71f247a621d82108e545257296e0427",
"index": 7048,
"step-1": "<mask token>\n\n\nclass Solution:\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n\n def minWindow(self, source, target):\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n for char in target:\n t_char_count[char] += 1\n j = 0\n min_substr = ''\n min_length = float('inf')\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count,\n t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[\n char]:\n return False\n return True\n",
"step-3": "<mask token>\n\n\nclass Solution:\n \"\"\"\n @param: source : A string\n @param: target: A string\n @return: A string denote the minimum window, return \"\" if there is no such a string\n \"\"\"\n\n def minWindow(self, source, target):\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n for char in target:\n t_char_count[char] += 1\n j = 0\n min_substr = ''\n min_length = float('inf')\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count,\n t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[\n char]:\n return False\n return True\n",
"step-4": "<mask token>\nfrom collections import defaultdict\n\n\nclass Solution:\n \"\"\"\n @param: source : A string\n @param: target: A string\n @return: A string denote the minimum window, return \"\" if there is no such a string\n \"\"\"\n\n def minWindow(self, source, target):\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n for char in target:\n t_char_count[char] += 1\n j = 0\n min_substr = ''\n min_length = float('inf')\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count,\n t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[\n char]:\n return False\n return True\n",
"step-5": "'''\nGiven a string S and a string T,\nfind the minimum window in S which will contain all the characters in T in complexity O(n).\n\nFor example,\nS = \"ADOBECODEBANC\"\nT = \"ABC\"\nMinimum window is \"BANC\".\n\nNote:\nIf there is no such window in S that covers all characters in T, return the empty string \"\".\n\nIf there are multiple such windows,\nyou are guaranteed that there will always be only one unique minimum window in S.\n'''\nfrom collections import defaultdict\nclass Solution:\n \"\"\"\n @param: source : A string\n @param: target: A string\n @return: A string denote the minimum window, return \"\" if there is no such a string\n \"\"\"\n def minWindow(self, source, target):\n # create a hashmap/dictionary for target, {key: value = char: count}\n s_char_count = defaultdict(int)\n t_char_count = defaultdict(int)\n\n for char in target:\n t_char_count[char] += 1\n\n j = 0\n min_substr = ''\n min_length = float('inf')\n\n for i in range(len(source)):\n while j < len(source) and not self.is_contain(s_char_count, t_char_count):\n s_char_count[source[j]] += 1\n j += 1\n\n if self.is_contain(s_char_count, t_char_count):\n if min_length > j - i:\n min_length = j - i\n min_substr = source[i:j]\n s_char_count[source[i]] -= 1\n\n return min_substr\n\n def is_contain(self, s_char_count, t_char_count):\n for char in t_char_count:\n if char not in s_char_count or s_char_count[char] < t_char_count[char]:\n return False\n return True\n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def pull_from_solr(output_directory):
solr_url = (
'http://54.191.81.42:8888/solr/collection1/select?q=*%3A*&wt=json&indent=true'
)
req = requests.get(solr_url)
if req.status_code != 200:
raise
new_data = req.json()
for doc in new_data['response']['docs']:
doc_url = doc['url']
doc_sha = hashlib.sha224(doc_url).hexdigest()
doc.update({'sha': doc_sha})
with open(os.path.join(output_directory, '%s.json' % doc_sha), 'w'
) as f:
f.write(json.dumps(doc, indent=4))
<|reserved_special_token_1|>
import requests
import json
import hashlib
import os
def pull_from_solr(output_directory):
solr_url = (
'http://54.191.81.42:8888/solr/collection1/select?q=*%3A*&wt=json&indent=true'
)
req = requests.get(solr_url)
if req.status_code != 200:
raise
new_data = req.json()
for doc in new_data['response']['docs']:
doc_url = doc['url']
doc_sha = hashlib.sha224(doc_url).hexdigest()
doc.update({'sha': doc_sha})
with open(os.path.join(output_directory, '%s.json' % doc_sha), 'w'
) as f:
f.write(json.dumps(doc, indent=4))
<|reserved_special_token_1|>
import requests
import json
import hashlib
import os
def pull_from_solr(output_directory):
solr_url = 'http://54.191.81.42:8888/solr/collection1/select?q=*%3A*&wt=json&indent=true'
# TODO: ask about auth for this
req = requests.get(solr_url)
if req.status_code != 200:
raise
new_data = req.json()
for doc in new_data['response']['docs']:
doc_url = doc['url']
doc_sha = hashlib.sha224(doc_url).hexdigest()
doc.update({"sha": doc_sha})
with open(os.path.join(output_directory, '%s.json' % doc_sha), 'w') as f:
f.write(json.dumps(doc, indent=4))
|
flexible
|
{
"blob_id": "47b40e4311f76cd620b7c6ed6b39216d866fa857",
"index": 8530,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef pull_from_solr(output_directory):\n solr_url = (\n 'http://54.191.81.42:8888/solr/collection1/select?q=*%3A*&wt=json&indent=true'\n )\n req = requests.get(solr_url)\n if req.status_code != 200:\n raise\n new_data = req.json()\n for doc in new_data['response']['docs']:\n doc_url = doc['url']\n doc_sha = hashlib.sha224(doc_url).hexdigest()\n doc.update({'sha': doc_sha})\n with open(os.path.join(output_directory, '%s.json' % doc_sha), 'w'\n ) as f:\n f.write(json.dumps(doc, indent=4))\n",
"step-3": "import requests\nimport json\nimport hashlib\nimport os\n\n\ndef pull_from_solr(output_directory):\n solr_url = (\n 'http://54.191.81.42:8888/solr/collection1/select?q=*%3A*&wt=json&indent=true'\n )\n req = requests.get(solr_url)\n if req.status_code != 200:\n raise\n new_data = req.json()\n for doc in new_data['response']['docs']:\n doc_url = doc['url']\n doc_sha = hashlib.sha224(doc_url).hexdigest()\n doc.update({'sha': doc_sha})\n with open(os.path.join(output_directory, '%s.json' % doc_sha), 'w'\n ) as f:\n f.write(json.dumps(doc, indent=4))\n",
"step-4": "import requests\nimport json\nimport hashlib\nimport os\n\n\ndef pull_from_solr(output_directory):\n solr_url = 'http://54.191.81.42:8888/solr/collection1/select?q=*%3A*&wt=json&indent=true'\n\n # TODO: ask about auth for this\n req = requests.get(solr_url)\n\n if req.status_code != 200:\n raise\n\n new_data = req.json()\n\n for doc in new_data['response']['docs']:\n doc_url = doc['url']\n doc_sha = hashlib.sha224(doc_url).hexdigest()\n doc.update({\"sha\": doc_sha})\n\n with open(os.path.join(output_directory, '%s.json' % doc_sha), 'w') as f:\n f.write(json.dumps(doc, indent=4))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import sys
import logging
import copy
import socket
from . import game_map
class GameUnix:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
self.sfile.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
self.sfile.write('\n')
self.sfile.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = self.sfile.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = "{}_{}.log".format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG, filemode='w')
logging.info("Initialized bot {}".format(name))
def __init__(self, name, socket_path="/dev/shm/bot.sock"):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
connected = False
while not connected:
try:
self.s.connect(socket_path)
connected = True
except Exception:
pass # Do nothing, just try again
self.sfile = self.s.makefile('rw')
self._name = name
self._send_name = False
tag = int(self._get_string())
GameUnix._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info("---NEW TURN---")
recv = self._get_string()
if recv == "":
self.close()
self.done = True
return self.map # last step map
self.map._parse(recv)
return self.map
def close(self):
self.sfile.close()
self.s.close()
class GameStdIO:
"""
:ivar map: Current map representation
:ivar initial_map: The initial version of the map before game starts
"""
def _send_string(self, s):
"""
Send data to the game. Call :function:`done_sending` once finished.
:param str s: String to send
:return: nothing
"""
sys.stdout.write(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
sys.stdout.write('\n')
sys.stdout.flush()
def _get_string(self):
"""
Read input from the game.
:return: The input read from the Halite engine
:rtype: str
"""
result = sys.stdin.readline().rstrip('\n')
return result
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
@staticmethod
def _set_up_logging(tag, name):
"""
Set up and truncate the log
:param tag: The user tag (used for naming the log)
:param name: The bot name (used for naming the log)
:return: nothing
"""
log_file = "{}_{}.log".format(tag, name)
logging.basicConfig(filename=log_file, level=logging.DEBUG, filemode='w')
logging.info("Initialized bot {}".format(name))
def __init__(self, name):
"""
Initialize the bot with the given name.
:param name: The name of the bot.
"""
self._name = name
self._send_name = False
tag = int(self._get_string())
GameStdIO._set_up_logging(tag, name)
width, height = [int(x) for x in self._get_string().strip().split()]
self.map = game_map.Map(tag, width, height)
self.update_map()
self.initial_map = copy.deepcopy(self.map)
self._send_name = True
self.done = False
def update_map(self):
"""
Parse the map given by the engine.
:return: new parsed map
:rtype: game_map.Map
"""
if self._send_name:
self._send_string(self._name)
self._done_sending()
self._send_name = False
logging.info("---NEW TURN---")
recv = self._get_string()
if recv == "":
self.close()
self.done = True
return self.map # last step map
self.map._parse(recv)
return self.map
def close(self):
pass
|
normal
|
{
"blob_id": "09d31df9c76975377b44470e1f2ba4a5c4b7bbde",
"index": 912,
"step-1": "<mask token>\n\n\nclass GameStdIO:\n <mask token>\n <mask token>\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n sys.stdout.write('\\n')\n sys.stdout.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = sys.stdin.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = '{}_{}.log'.format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG,\n filemode='w')\n logging.info('Initialized bot {}'.format(name))\n\n def __init__(self, name):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameStdIO._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info('---NEW TURN---')\n recv = self._get_string()\n if recv == '':\n self.close()\n self.done = True\n return self.map\n self.map._parse(recv)\n return self.map\n\n def close(self):\n pass\n",
"step-2": "<mask token>\n\n\nclass GameUnix:\n <mask token>\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n self.sfile.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n self.sfile.write('\\n')\n self.sfile.flush()\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info('---NEW TURN---')\n recv = self._get_string()\n if recv == '':\n self.close()\n self.done = True\n return self.map\n self.map._parse(recv)\n return self.map\n <mask token>\n\n\nclass GameStdIO:\n \"\"\"\n :ivar map: Current map representation\n :ivar initial_map: The initial version of the map before game starts\n \"\"\"\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n sys.stdout.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n sys.stdout.write('\\n')\n sys.stdout.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = sys.stdin.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = '{}_{}.log'.format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG,\n filemode='w')\n logging.info('Initialized bot {}'.format(name))\n\n def __init__(self, name):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameStdIO._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info('---NEW TURN---')\n recv = self._get_string()\n if recv == '':\n self.close()\n self.done = True\n return self.map\n self.map._parse(recv)\n return self.map\n\n def close(self):\n pass\n",
"step-3": "<mask token>\n\n\nclass GameUnix:\n <mask token>\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n self.sfile.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n self.sfile.write('\\n')\n self.sfile.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = self.sfile.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = '{}_{}.log'.format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG,\n filemode='w')\n logging.info('Initialized bot {}'.format(name))\n\n def __init__(self, name, socket_path='/dev/shm/bot.sock'):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n connected = False\n while not connected:\n try:\n self.s.connect(socket_path)\n connected = True\n except Exception:\n pass\n self.sfile = self.s.makefile('rw')\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameUnix._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info('---NEW TURN---')\n recv = self._get_string()\n if recv == '':\n self.close()\n self.done = True\n return self.map\n self.map._parse(recv)\n return self.map\n <mask token>\n\n\nclass GameStdIO:\n \"\"\"\n :ivar map: Current map representation\n :ivar initial_map: The initial version of the map before game starts\n \"\"\"\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. 
Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n sys.stdout.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n sys.stdout.write('\\n')\n sys.stdout.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = sys.stdin.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = '{}_{}.log'.format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG,\n filemode='w')\n logging.info('Initialized bot {}'.format(name))\n\n def __init__(self, name):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameStdIO._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info('---NEW TURN---')\n recv = self._get_string()\n if recv == '':\n self.close()\n self.done = True\n return self.map\n self.map._parse(recv)\n return self.map\n\n def close(self):\n pass\n",
"step-4": "import sys\nimport logging\nimport copy\nimport socket\nfrom . import game_map\n\n\nclass GameUnix:\n \"\"\"\n :ivar map: Current map representation\n :ivar initial_map: The initial version of the map before game starts\n \"\"\"\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n self.sfile.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n self.sfile.write('\\n')\n self.sfile.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = self.sfile.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = '{}_{}.log'.format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG,\n filemode='w')\n logging.info('Initialized bot {}'.format(name))\n\n def __init__(self, name, socket_path='/dev/shm/bot.sock'):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n connected = False\n while not connected:\n try:\n self.s.connect(socket_path)\n connected = True\n except Exception:\n pass\n self.sfile = self.s.makefile('rw')\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameUnix._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info('---NEW TURN---')\n recv = self._get_string()\n if recv == '':\n self.close()\n self.done = True\n return self.map\n self.map._parse(recv)\n return self.map\n\n def close(self):\n self.sfile.close()\n self.s.close()\n\n\nclass GameStdIO:\n \"\"\"\n :ivar map: Current map representation\n :ivar initial_map: The initial version of the map before game starts\n \"\"\"\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. 
Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n sys.stdout.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n sys.stdout.write('\\n')\n sys.stdout.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = sys.stdin.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = '{}_{}.log'.format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG,\n filemode='w')\n logging.info('Initialized bot {}'.format(name))\n\n def __init__(self, name):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameStdIO._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info('---NEW TURN---')\n recv = self._get_string()\n if recv == '':\n self.close()\n self.done = True\n return self.map\n self.map._parse(recv)\n return self.map\n\n def close(self):\n pass\n",
"step-5": "import sys\nimport logging\nimport copy\nimport socket\n\nfrom . import game_map\n\nclass GameUnix:\n \"\"\"\n :ivar map: Current map representation\n :ivar initial_map: The initial version of the map before game starts\n \"\"\"\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n self.sfile.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n self.sfile.write('\\n')\n self.sfile.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = self.sfile.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = \"{}_{}.log\".format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG, filemode='w')\n logging.info(\"Initialized bot {}\".format(name))\n\n def __init__(self, name, socket_path=\"/dev/shm/bot.sock\"):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n connected = False\n while not connected:\n try:\n self.s.connect(socket_path)\n connected = True\n except Exception:\n pass # Do nothing, just try again\n self.sfile = self.s.makefile('rw')\n\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameUnix._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info(\"---NEW TURN---\")\n recv = self._get_string()\n\n if recv == \"\":\n self.close()\n self.done = True\n return self.map # last step map\n\n self.map._parse(recv)\n return self.map\n \n def close(self):\n self.sfile.close()\n self.s.close()\n\nclass GameStdIO:\n \"\"\"\n :ivar map: Current map representation\n :ivar initial_map: The initial version of the map before game starts\n \"\"\"\n\n def _send_string(self, s):\n \"\"\"\n Send data to the game. 
Call :function:`done_sending` once finished.\n\n :param str s: String to send\n :return: nothing\n \"\"\"\n sys.stdout.write(s)\n\n def _done_sending(self):\n \"\"\"\n Finish sending commands to the game.\n\n :return: nothing\n \"\"\"\n sys.stdout.write('\\n')\n sys.stdout.flush()\n\n def _get_string(self):\n \"\"\"\n Read input from the game.\n\n :return: The input read from the Halite engine\n :rtype: str\n \"\"\"\n result = sys.stdin.readline().rstrip('\\n')\n return result\n\n def send_command_queue(self, command_queue):\n \"\"\"\n Issue the given list of commands.\n\n :param list[str] command_queue: List of commands to send the Halite engine\n :return: nothing\n \"\"\"\n for command in command_queue:\n self._send_string(command)\n\n self._done_sending()\n\n @staticmethod\n def _set_up_logging(tag, name):\n \"\"\"\n Set up and truncate the log\n\n :param tag: The user tag (used for naming the log)\n :param name: The bot name (used for naming the log)\n :return: nothing\n \"\"\"\n log_file = \"{}_{}.log\".format(tag, name)\n logging.basicConfig(filename=log_file, level=logging.DEBUG, filemode='w')\n logging.info(\"Initialized bot {}\".format(name))\n\n def __init__(self, name):\n \"\"\"\n Initialize the bot with the given name.\n\n :param name: The name of the bot.\n \"\"\"\n\n self._name = name\n self._send_name = False\n tag = int(self._get_string())\n GameStdIO._set_up_logging(tag, name)\n width, height = [int(x) for x in self._get_string().strip().split()]\n self.map = game_map.Map(tag, width, height)\n self.update_map()\n self.initial_map = copy.deepcopy(self.map)\n self._send_name = True\n\n self.done = False\n\n def update_map(self):\n \"\"\"\n Parse the map given by the engine.\n\n :return: new parsed map\n :rtype: game_map.Map\n \"\"\"\n if self._send_name:\n self._send_string(self._name)\n self._done_sending()\n self._send_name = False\n logging.info(\"---NEW TURN---\")\n recv = self._get_string()\n\n if recv == \"\":\n self.close()\n self.done = True\n return self.map # last step map\n\n self.map._parse(recv)\n return self.map\n \n def close(self):\n pass",
"step-ids": [
8,
14,
18,
21,
22
]
}
|
[
8,
14,
18,
21,
22
] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-27 21:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cms', '0020_old_tree_cleanup'),
('styleguide', '0002_flexcontainer'),
]
operations = [
migrations.CreateModel(
name='ContentSection',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='styleguide_contentsection', serialize=False, to='cms.CMSPlugin')),
('background_color', models.CharField(choices=[('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')], default='white', max_length=20)),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.AlterField(
model_name='flexcontainer',
name='spacing',
field=models.CharField(choices=[('flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center', 'center'), ('space-between', 'space-between'), ('space-around', 'space-around')], default='flex-start', max_length=13),
),
]
|
normal
|
{
"blob_id": "85c2a4163a3132794186b95b4068f6c6e1104828",
"index": 1306,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('cms', '0020_old_tree_cleanup'), ('styleguide',\n '0002_flexcontainer')]\n operations = [migrations.CreateModel(name='ContentSection', fields=[(\n 'cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=\n django.db.models.deletion.CASCADE, parent_link=True, primary_key=\n True, related_name='styleguide_contentsection', serialize=False, to\n ='cms.CMSPlugin')), ('background_color', models.CharField(choices=[\n ('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')],\n default='white', max_length=20))], options={'abstract': False},\n bases=('cms.cmsplugin',)), migrations.AlterField(model_name=\n 'flexcontainer', name='spacing', field=models.CharField(choices=[(\n 'flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center',\n 'center'), ('space-between', 'space-between'), ('space-around',\n 'space-around')], default='flex-start', max_length=13))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('cms', '0020_old_tree_cleanup'), ('styleguide',\n '0002_flexcontainer')]\n operations = [migrations.CreateModel(name='ContentSection', fields=[(\n 'cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=\n django.db.models.deletion.CASCADE, parent_link=True, primary_key=\n True, related_name='styleguide_contentsection', serialize=False, to\n ='cms.CMSPlugin')), ('background_color', models.CharField(choices=[\n ('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')],\n default='white', max_length=20))], options={'abstract': False},\n bases=('cms.cmsplugin',)), migrations.AlterField(model_name=\n 'flexcontainer', name='spacing', field=models.CharField(choices=[(\n 'flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center',\n 'center'), ('space-between', 'space-between'), ('space-around',\n 'space-around')], default='flex-start', max_length=13))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.11.13 on 2018-06-27 21:49\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('cms', '0020_old_tree_cleanup'),\n ('styleguide', '0002_flexcontainer'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='ContentSection',\n fields=[\n ('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='styleguide_contentsection', serialize=False, to='cms.CMSPlugin')),\n ('background_color', models.CharField(choices=[('navy', '#1c2532'), ('light', '#f3f4f5'), ('white', '#ffffff')], default='white', max_length=20)),\n ],\n options={\n 'abstract': False,\n },\n bases=('cms.cmsplugin',),\n ),\n migrations.AlterField(\n model_name='flexcontainer',\n name='spacing',\n field=models.CharField(choices=[('flex-start', 'flex-start'), ('flex-end', 'flex-end'), ('center', 'center'), ('space-between', 'space-between'), ('space-around', 'space-around')], default='flex-start', max_length=13),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(2, 1)
namedValues = NamedValues(('downstream', 1), ('upstream', 2))
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(3, 1, 5, 6, 2, 4)
namedValues = NamedValues(('undefined', 1), ('bestEffort', 2), (
'nonRealTimePollingService', 3), ('realTimePollingService', 4), (
'unsolictedGrantServiceWithAD', 5), ('unsolictedGrantService', 6))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DocsIetfQosBitRate(TextualConvention, Unsigned32):
<|reserved_special_token_0|>
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(2, 1)
namedValues = NamedValues(('downstream', 1), ('upstream', 2))
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(3, 1, 5, 6, 2, 4)
namedValues = NamedValues(('undefined', 1), ('bestEffort', 2), (
'nonRealTimePollingService', 3), ('realTimePollingService', 4), (
'unsolictedGrantServiceWithAD', 5), ('unsolictedGrantService', 6))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DocsIetfQosBitRate(TextualConvention, Unsigned32):
displayHint = 'd'
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(2, 1)
namedValues = NamedValues(('downstream', 1), ('upstream', 2))
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(3, 1, 5, 6, 2, 4)
namedValues = NamedValues(('undefined', 1), ('bestEffort', 2), (
'nonRealTimePollingService', 3), ('realTimePollingService', 4), (
'unsolictedGrantServiceWithAD', 5), ('unsolictedGrantService', 6))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DocsIetfQosBitRate(TextualConvention, Unsigned32):
displayHint = 'd'
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(2, 1)
namedValues = NamedValues(('downstream', 1), ('upstream', 2))
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec + SingleValueConstraint(3, 1, 5, 6, 2, 4)
namedValues = NamedValues(('undefined', 1), ('bestEffort', 2), (
'nonRealTimePollingService', 3), ('realTimePollingService', 4), (
'unsolictedGrantServiceWithAD', 5), ('unsolictedGrantService', 6))
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosMIB.setOrganization(
'IETF IP over Cable Data Network (IPCDN)\nWorking Group')
if mibBuilder.loadTexts:
docsIetfQosMIB.setContactInfo(
"""
Co-Author: Michael Patrick
Postal: Motorola BCS
111 Locke Drive
Marlborough, MA 01752-7214
U.S.A.
Phone: +1 508 786 7563
E-mail: [email protected]
Co-Author: William Murwin
Postal: Motorola BCS
111 Locke Drive
Marlborough, MA 01752-7214
U.S.A.
Phone: +1 508 786 7594
E-mail: [email protected]
IETF IPCDN Working Group
General Discussion: [email protected]
Subscribe: http://www.ietf.org/mailman/listinfo/ipcdn
Archive: ftp://ftp.ietf.org/ietf-mail-archive/ipcdn
Co-chairs: Richard Woundy, [email protected]
Jean-Francois Mule, [email protected]"""
)
if mibBuilder.loadTexts:
docsIetfQosMIB.setDescription(
"""This is the management information for
Quality Of Service (QOS) for DOCSIS 1.1 and 2.0.
Copyright (C) The Internet Society (2006). This version of
this MIB module is part of RFC 4323; see the RFC itself for
full legal notices."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassTable.setDescription(
"""This table describes the packet classification
configured on the CM or CMTS.
The model is that a packet either received
as input from an interface or transmitted
for output on an interface may be compared
against an ordered list of rules pertaining to
the packet contents. Each rule is a row of this
table. A matching rule provides a Service Flow
ID to which the packet is classified.
All rules need to match for a packet to match
a classifier.
The objects in this row correspond to a set of
Classifier Encoding parameters in a DOCSIS
MAC management message. The
docsIetfQosPktClassBitMap indicates which
particular parameters were present in the
classifier as signaled in the DOCSIS message.
If the referenced parameter was not present
in the signaled DOCSIS 1.1 and 2.0 Classifier, the
corresponding object in this row reports a
value as specified in the DESCRIPTION section."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassEntry.setDescription(
"""An entry in this table provides a single packet
classifier rule. The index ifIndex is an ifType
of docsCableMaclayer(127)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassId.setDescription(
"""Index assigned to packet classifier entry by
the CMTS, which is unique per Service Flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassDirection.setDescription(
"""Indicates the direction to which the classifier
is applied.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassPriority.setDescription(
"""The value specifies the order of evaluation
of the classifiers.
The higher the value, the higher the priority.
The value of 0 is used as default in
provisioned Service Flows Classifiers.
The default value of 64 is used for dynamic
Service Flow Classifiers.
If the referenced parameter is not present
in a classifier, this object reports the default
value as defined above."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassIpTosLow.setDescription(
"""The low value of a range of TOS byte values.
If the referenced parameter is not present
in a classifier, this object reports the value
of 0.
The IP TOS octet, as originally defined in RFC 791,
has been superseded by the 6-bit Differentiated
Services Field (DSField, RFC 3260) and the 2-bit
Explicit Congestion Notification Field (ECN field,
RFC 3168). This object is defined as an 8-bit
octet as per the DOCSIS Specification
for packet classification."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassIpTosHigh.setDescription(
"""The 8-bit high value of a range of TOS byte
values.
If the referenced parameter is not present
in a classifier, this object reports the
value of 0.
The IP TOS octet as originally defined in RFC 791
has been superseded by the 6-bit Differentiated
Services Field (DSField, RFC 3260) and the 2-bit
Explicit Congestion Notification Field (ECN field,
RFC 3168). This object is defined as an 8-bit
octet as defined by the DOCSIS Specification
for packet classification."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassIpTosMask.setDescription(
"""The mask value is bitwise ANDed with TOS byte
in an IP packet, and this value is used for
range checking of TosLow and TosHigh.
If the referenced parameter is not present
in a classifier, this object reports the value
of 0.
The IP TOS octet as originally defined in RFC 791
has been superseded by the 6-bit Differentiated
Services Field (DSField, RFC 3260) and the 2-bit
Explicit Congestion Notification Field (ECN field,
RFC 3168). This object is defined as an 8-bit
octet per the DOCSIS Specification for packet
classification."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassIpProtocol.setDescription(
"""This object indicates the value of the IP
Protocol field required for IP packets to match
this rule.
The value 256 matches traffic with any IP Protocol
value. The value 257 by convention matches both TCP
and UDP.
If the referenced parameter is not present
in a classifier, this object reports the value
of 258."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassInetAddressType.setDescription(
"""The type of the Internet address for
docsIetfQosPktClassInetSourceAddr,
docsIetfQosPktClassInetSourceMask,
docsIetfQosPktClassInetDestAddr, and
docsIetfQosPktClassInetDestMask.
If the referenced parameter is not present
in a classifier, this object reports the value of
ipv4(1)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassInetSourceAddr.setDescription(
"""This object specifies the value of the IP
Source Address required for packets to match
this rule.
An IP packet matches the rule when the packet
IP Source Address bitwise ANDed with the
docsIetfQosPktClassInetSourceMask value equals the
docsIetfQosPktClassInetSourceAddr value.
The address type of this object is specified by
docsIetfQosPktClassInetAddressType.
If the referenced parameter is not present
in a classifier, this object reports the value of
'00000000'H."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassInetSourceMask.setDescription(
"""This object specifies which bits of a packet's
IP Source Address are compared to match
this rule.
An IP packet matches the rule when the packet
source address bitwise ANDed with the
docsIetfQosPktClassInetSourceMask value equals the
docsIetfQosIpPktClassInetSourceAddr value.
The address type of this object is specified by
docsIetfQosPktClassInetAddressType.
If the referenced parameter is not present
in a classifier, this object reports the value of
'FFFFFFFF'H."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassInetDestAddr.setDescription(
"""This object specifies the value of the IP
Destination Address required for packets to match
this rule.
An IP packet matches the rule when the packet
IP Destination Address bitwise ANDed with the
docsIetfQosPktClassInetDestMask value
equals the docsIetfQosPktClassInetDestAddr value.
The address type of this object is specified by
docsIetfQosPktClassInetAddressType.
If the referenced parameter is not present
in a classifier, this object reports the value of
'00000000'H."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassInetDestMask.setDescription(
"""This object specifies which bits of a packet's
IP Destination Address are compared to
match this rule.
An IP packet matches the rule when the packet
destination address bitwise ANDed with the
docsIetfQosPktClassInetDestMask value equals the
docsIetfQosIpPktClassInetDestAddr value.
The address type of this object is specified by
docsIetfQosPktClassInetAddressType.
If the referenced parameter is not present
in a classifier, this object reports the value of
'FFFFFFFF'H."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassSourcePortStart.setDescription(
"""This object specifies the low-end inclusive
range of TCP/UDP source port numbers to which
a packet is compared. This object is irrelevant
for non-TCP/UDP IP packets.
If the referenced parameter is not present
in a classifier, this object reports the value
of 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassSourcePortEnd.setDescription(
"""This object specifies the high-end inclusive
range of TCP/UDP source port numbers to which
a packet is compared. This object is irrelevant
for non-TCP/UDP IP packets.
If the referenced parameter is not present
in a classifier, this object reports the value of
65535."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassDestPortStart.setDescription(
"""This object specifies the low-end inclusive
range of TCP/UDP destination port numbers to
which a packet is compared.
If the referenced parameter is not present
in a classifier, this object reports the value
of 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassDestPortEnd.setDescription(
"""This object specifies the high-end inclusive
range of TCP/UDP destination port numbers to which
a packet is compared.
If the referenced parameter is not present
in a classifier, this object reports the value of
65535."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassDestMacAddr.setDescription(
"""An Ethernet packet matches an entry when its
destination MAC address bitwise ANDed with
docsIetfQosPktClassDestMacMask equals the value of
docsIetfQosPktClassDestMacAddr.
If the referenced parameter is not present
in a classifier, this object reports the value of
'000000000000'H."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassDestMacMask.setDescription(
"""An Ethernet packet matches an entry when its
destination MAC address bitwise ANDed with
docsIetfQosPktClassDestMacMask equals the value of
docsIetfQosPktClassDestMacAddr.
If the referenced parameter is not present
in a classifier, this object reports the value of
'000000000000'H."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassSourceMacAddr.setDescription(
"""An Ethernet packet matches this entry when its
source MAC address equals the value of
this object.
If the referenced parameter is not present
in a classifier, this object reports the value of
'FFFFFFFFFFFF'H."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassEnetProtocolType.setDescription(
"""This object indicates the format of the layer 3
protocol ID in the Ethernet packet. A value of
none(0) means that the rule does not use the
layer 3 protocol type as a matching criteria.
A value of ethertype(1) means that the rule
applies only to frames that contain an
EtherType value. Ethertype values are contained
in packets using the Dec-Intel-Xerox (DIX)
encapsulation or the RFC1042 Sub-Network Access
Protocol (SNAP) encapsulation formats.
A value of dsap(2) means that the rule applies
only to frames using the IEEE802.3
encapsulation format with a Destination Service
Access Point (DSAP) other
than 0xAA (which is reserved for SNAP).
A value of mac(3) means that the rule applies
only to MAC management messages for MAC management
messages.
A value of all(4) means that the rule matches
all Ethernet packets.
If the Ethernet frame contains an 802.1P/Q Tag
header (i.e., EtherType 0x8100), this object
applies to the embedded EtherType field within
the 802.1P/Q header.
If the referenced parameter is not present in a
classifier, this object reports the value of 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassEnetProtocol.setDescription(
"""If docsIetfQosEthPktClassProtocolType is none(0),
this object is ignored when considering whether
a packet matches the current rule.
If dosQosPktClassEnetProtocolType is ethertype(1),
this object gives the 16-bit value of the
EtherType that the packet must match in order to
match the rule.
If docsIetfQosPktClassEnetProtocolType is dsap(2),
the lower 8 bits of this object's value must match
the DSAP byte of the packet in order to match the
rule.
If docsIetfQosPktClassEnetProtocolType is mac(3),
the lower 8 bits of this object's value represent a
lower bound (inclusive) of MAC management message
type codes matched, and the upper 8 bits represent
the upper bound (inclusive) of matched MAC message
type codes. Certain message type codes are
excluded from matching, as specified in the
reference.
If the Ethernet frame contains an 802.1P/Q Tag
header (i.e., EtherType 0x8100), this object applies
to the embedded EtherType field within the 802.1P/Q
header.
If the referenced parameter is not present in the
classifier, the value of this object is reported
as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassUserPriLow.setDescription(
"""This object applies only to Ethernet frames
using the 802.1P/Q tag header (indicated with
EtherType 0x8100). Such frames include a 16-bit
Tag that contains a 3-bit Priority field and
a 12-bit VLAN number.
Tagged Ethernet packets must have a 3-bit
Priority field within the range of
docsIetfQosPktClassPriLow to
docsIetfQosPktClassPriHigh in order to match this
rule.
If the referenced parameter is not present in the
classifier, the value of this object is reported
as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassUserPriHigh.setDescription(
"""This object applies only to Ethernet frames
using the 802.1P/Q tag header (indicated with
EtherType 0x8100). Such frames include a 16-bit
Tag that contains a 3-bit Priority field and
a 12-bit VLAN number.
Tagged Ethernet packets must have a 3-bit
Priority field within the range of
docsIetfQosPktClassUserPriLow to
docsIetfQosPktClassUserPriHigh in order to match this
rule.
If the referenced parameter is not present in the
classifier, the value of this object is reported
as 7."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassVlanId.setDescription(
"""This object applies only to Ethernet frames
using the 802.1P/Q tag header.
Tagged packets must have a VLAN Identifier that
matches the value in order to match the rule.
If the referenced parameter is not present in the
classifier, the value of this object is reported
as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassStateActive.setDescription(
"""This object indicates whether or not the classifier
is enabled to classify packets to a Service Flow.
If the referenced parameter is not present in the
classifier, the value of this object is reported
as true(1)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassPkts.setDescription(
"""This object counts the number of packets that have
been classified using this entry. This
includes all packets delivered to a Service Flow
maximum rate policing function, whether or not that
function drops the packets.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPktClassBitMap.setDescription(
"""This object indicates which parameter encodings
were actually present in the DOCSIS packet
classifier encoding signaled in the DOCSIS message
that created or modified the classifier. Note that
Dynamic Service Change messages have replace
semantics, so that all non-default parameters must
be present whether the classifier is being created
or changed.
A bit of this object is set to 1 if the parameter
indicated by the comment was present in the
classifier encoding, and to 0 otherwise.
Note that BITS are encoded most significant bit
first, so that if, for example, bits 6 and 7 are
set, this object is encoded as the octet string
'030000'H."""
)
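# Illustrative sketch, not part of the RFC 4323 MIB definitions: SMI BITS
# values are encoded most-significant-bit first, i.e. bit 0 is the high-order
# bit of the first octet.  The helper below (an assumed name, for illustration
# only) builds such an octet string and reproduces the '030000'H example from
# the description above.
def _bits_to_octets(positions, num_octets=3):
    octets = bytearray(num_octets)
    for pos in positions:
        octets[pos // 8] |= 0x80 >> (pos % 8)   # bit 0 is the MSB of octet 0
    return bytes(octets)

assert _bits_to_octets({6, 7}) == bytes.fromhex('030000')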
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetTable.setDescription(
"""This table describes the set of DOCSIS 1.1 and 2.0
QOS parameters defined in a managed device.
The ifIndex index specifies a DOCSIS MAC Domain.
The docsIetfQosServiceFlowId index specifies a
particular Service Flow.
The docsIetfQosParamSetType index indicates whether
the active, admitted, or provisioned QOS Parameter
Set is being described by the row.
Only the QOS Parameter Sets of DOCSIS 1.1 and 2.0
Service Flows are represented in this table.
DOCSIS 1.0 QOS service profiles are not
represented in this table.
Each row corresponds to a DOCSIS QOS Parameter Set
as signaled via DOCSIS MAC management messages.
Each object in the row corresponds to one or
part of one DOCSIS 1.1 Service Flow Encoding.
The docsIetfQosParamSetBitMap object in the row
indicates which particular parameters were signaled
in the original registration or dynamic service
request message that created the QOS Parameter Set.
In many cases, even if a QOS Parameter Set parameter
was not signaled, the DOCSIS specification calls
for a default value to be used. That default value
is reported as the value of the corresponding object
in this row.
Many objects are not applicable, depending on
the Service Flow direction or upstream scheduling
type. The object value reported in this case
is specified in the DESCRIPTION clause."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetEntry.setDescription('A unique set of QOS parameters.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetServiceClassName.setDescription(
"""Refers to the Service Class Name from which the
parameter set values were derived.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the default
value of this object is a zero-length string."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetPriority.setDescription(
"""The relative priority of a Service Flow.
Higher numbers indicate higher priority.
This priority should only be used to differentiate
Service Flows with otherwise identical parameter sets.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the default
value of this object is 0. If the parameter is
not applicable, the reported value is 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetMaxTrafficRate.setDescription(
"""Maximum sustained traffic rate allowed for this
Service Flow in bits/sec. Must count all MAC frame
data PDU from the bytes following the MAC header
HCS to the end of the CRC. The number of bytes
forwarded is limited during any time interval.
The value 0 means no maximum traffic rate is
enforced. This object applies to both upstream and
downstream Service Flows.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the default
value of this object is 0. If the parameter is
not applicable, it is reported as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetMaxTrafficBurst.setDescription(
"""Specifies the token bucket size in bytes
for this parameter set. The value is calculated
from the byte following the MAC header HCS to
the end of the CRC. This object is applied in
conjunction with docsIetfQosParamSetMaxTrafficRate
to calculate maximum sustained traffic rate.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the default
value of this object for scheduling types
bestEffort (2), nonRealTimePollingService(3),
and realTimePollingService(4) is 3044.
If this parameter is not applicable, it is reported
as 0."""
)
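# Illustrative sketch, not part of the RFC 4323 MIB definitions: the burst
# parameter acts as the bucket depth used together with
# docsIetfQosParamSetMaxTrafficRate.  Assuming the usual DOCSIS bound of
# "bytes forwarded over any interval T <= T * (rate / 8) + burst", a simple
# conformance check could look like this (names and figures are illustrative).
def _within_max_sustained_rate(bytes_sent, interval_s, rate_bps, burst_bytes):
    if rate_bps == 0:        # 0 means no maximum traffic rate is enforced
        return True
    return bytes_sent <= interval_s * (rate_bps / 8.0) + burst_bytes

# e.g. 10 Mbit/s with the default 3044-byte burst over one second
assert _within_max_sustained_rate(1250000, 1.0, 10000000, 3044)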
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetMinReservedRate.setDescription(
"""Specifies the guaranteed minimum rate in
bits/sec for this parameter set. The value is
calculated from the byte following the MAC
header HCS to the end of the CRC. The default
value of 0 means that no bandwidth is reserved.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the default
value of this object is 0. If the parameter
is not applicable, it is reported as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetMinReservedPkt.setDescription(
"""Specifies an assumed minimum packet size in
bytes for which the
docsIetfQosParamSetMinReservedRate will be
provided. The value is calculated from the byte
following the MAC header HCS to the end of the
CRC.
If the referenced parameter is omitted from a
DOCSIS QOS parameter set, the default value is
CMTS implementation dependent. In this case, the
CMTS reports the default value it is using, and the
CM reports a value of 0. If the referenced
parameter is not applicable to the direction or
scheduling type of the Service Flow, both CMTS and
CM report this object's value as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetActiveTimeout.setDescription(
"""Specifies the maximum duration in seconds that
resources remain unused on an active service
flow before the CMTS signals that both active and
admitted parameter sets are null. The default
value of 0 signifies an infinite amount of time.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the default
value of this object is 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetAdmittedTimeout.setDescription(
"""Specifies the maximum duration in seconds that
resources remain in admitted state before
resources must be released.
The value of 0 signifies an infinite amount
of time.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the
default value of this object is 200."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetMaxConcatBurst.setDescription(
"""Specifies the maximum concatenated burst in
bytes that an upstream Service Flow is allowed.
The value is calculated from the FC byte of the
Concatenation MAC Header to the last CRC byte
of the last concatenated MAC frame, inclusive.
The value of 0 specifies no maximum burst.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set, the default
value of this object for scheduling types
bestEffort(2), nonRealTimePollingService(3), and
realTimePollingService(4) is 1522. If the parameter
is not applicable, this object's value is reported
as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetSchedulingType.setDescription(
"""Specifies the upstream scheduling service used for
upstream Service Flow.
If the referenced parameter is not present in the
corresponding DOCSIS QOS Parameter Set of an
upstream Service Flow, the default value of this
object is bestEffort(2). For QOS parameter sets of
downstream Service Flows, this object's value is
reported as undefined(1)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetNomPollInterval.setDescription(
"""Specifies the nominal interval in microseconds
between successive unicast request
opportunities on an upstream Service Flow.
This object applies only to upstream Service Flows
with DocsIetfQosSchedulingType of value
nonRealTimePollingService(3),
realTimePollingService(4), and
unsolictedGrantServiceWithAD(5). The parameter is
mandatory for realTimePollingService(4). If the
parameter is omitted with
nonRealTimePollingService(3), the CMTS uses an
implementation-dependent value. If the parameter
is omitted with unsolictedGrantServiceWithAD(5),
the CMTS uses as a default value the value of the
Nominal Grant Interval parameter. In all cases,
the CMTS reports the value it is using when the
parameter is applicable. The CM reports the
signaled parameter value if it was signaled,
and 0 otherwise.
If the referenced parameter is not applicable to
the direction or scheduling type of the
corresponding DOCSIS QOS Parameter Set, both
CMTS and CM report this object's value as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetTolPollJitter.setDescription(
"""Specifies the maximum amount of time in
microseconds that the unicast request interval
may be delayed from the nominal periodic
schedule on an upstream Service Flow.
This parameter is applicable only to upstream
Service Flows with a DocsIetfQosSchedulingType of
realTimePollingService(4) or
unsolictedGrantServiceWithAD(5).
If the referenced parameter is applicable but not
present in the corresponding DOCSIS QOS Parameter
Set, the CMTS uses an implementation-dependent
value and reports the value it is using.
The CM reports a value of 0 in this case.
If the parameter is not applicable to the
direction or upstream scheduling type of the
Service Flow, both CMTS and CM report this
object's value as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetUnsolicitGrantSize.setDescription(
"""Specifies the unsolicited grant size in bytes.
The grant size includes the entire MAC frame
data PDU from the Frame Control byte to the end
of the MAC frame.
The referenced parameter is applicable only
for upstream flows with a DocsIetfQosSchedulingType
of unsolicitedGrantServicewithAD(5) or
unsolicitedGrantService(6), and it is mandatory
when applicable. Both CMTS and CM report
the signaled value of the parameter in this
case.
If the referenced parameter is not applicable to
the direction or scheduling type of the
corresponding DOCSIS QOS Parameter Set, both
CMTS and CM report this object's value as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetNomGrantInterval.setDescription(
"""Specifies the nominal interval in microseconds
between successive data grant opportunities
on an upstream Service Flow.
The referenced parameter is applicable only
for upstream flows with a DocsIetfQosSchedulingType
of unsolicitedGrantServicewithAD(5) or
unsolicitedGrantService(6), and it is mandatory
when applicable. Both CMTS and CM report the
signaled value of the parameter in this case.
If the referenced parameter is not applicable to
the direction or scheduling type of the
corresponding DOCSIS QOS Parameter Set, both
CMTS and CM report this object's value as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetTolGrantJitter.setDescription(
"""Specifies the maximum amount of time in
microseconds that the transmission opportunities
may be delayed from the nominal periodic schedule.
The referenced parameter is applicable only
for upstream flows with a DocsIetfQosSchedulingType
of unsolicitedGrantServicewithAD(5) or
unsolicitedGrantService(6), and it is mandatory
when applicable. Both CMTS and CM report the
signaled value of the parameter in this case.
If the referenced parameter is not applicable to
the direction or scheduling type of the
corresponding DOCSIS QOS Parameter Set, both
CMTS and CM report this object's value as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetGrantsPerInterval.setDescription(
"""Specifies the number of data grants per Nominal
Grant Interval
(docsIetfQosParamSetNomGrantInterval).
The referenced parameter is applicable only
for upstream flows with a DocsIetfQosSchedulingType
of unsolicitedGrantServicewithAD(5) or
unsolicitedGrantService(6), and it is mandatory
when applicable. Both CMTS and CM report the
signaled value of the parameter in this case.
If the referenced parameter is not applicable to
the direction or scheduling type of the
corresponding DOCSIS QOS Parameter Set, both
CMTS and CM report this object's value as 0."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetTosAndMask.setDescription(
"""Specifies the AND mask for the IP TOS byte for
overwriting IP packet's TOS value. The IP packet
TOS byte is bitwise ANDed with
docsIetfQosParamSetTosAndMask, the result is
bitwise ORed with docsIetfQosParamSetTosORMask, and
that result is written to the IP packet TOS byte.
A value of 'FF'H for docsIetfQosParamSetTosAndMask
and a value of '00'H for
docsIetfQosParamSetTosOrMask means that the IP
Packet TOS byte is not overwritten.
This combination is reported if the referenced
parameter is not present in a QOS Parameter Set.
The IP TOS octet as originally defined in RFC 791
has been superseded by the 6-bit Differentiated
Services Field (DSField, RFC 3260) and the 2-bit
Explicit Congestion Notification Field (ECN field,
RFC 3168). Network operators SHOULD avoid
specifying values of docsIetfQosParamSetTosAndMask
and docsIetfQosParamSetTosORMask that would result
in the modification of the ECN bits.
In particular, operators should not use values of
docsIetfQosParamSetTosAndMask that have either of
the least-significant two bits set to 0. Similarly,
operators should not use values of
docsIetfQosParamSetTosORMask that have either of
the least-significant two bits set to 1.
Even though this object is only enforced by the
Cable Modem Termination System (CMTS),
Cable Modems MUST report the value as signaled in
the referenced parameter."""
)
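# Illustrative sketch, not part of the RFC 4323 MIB definitions: the TOS byte
# rewrite described above is simply (tos AND TosAndMask) OR TosOrMask.  The
# helper name and sample values are assumptions made for illustration only;
# note how an AND mask of '03'H preserves the two ECN bits.
def _rewrite_tos(tos, and_mask, or_mask):
    return (tos & and_mask) | or_mask

# 'FF'H / '00'H leaves the TOS byte untouched ...
assert _rewrite_tos(0xB9, 0xFF, 0x00) == 0xB9
# ... while '03'H with an OR mask of '28'H overwrites the DSCP (here AF11)
# and keeps the ECN bits
assert _rewrite_tos(0xB9, 0x03, 0x28) == 0x29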
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetTosOrMask.setDescription(
"""Specifies the OR mask for the IP TOS byte.
See the description of docsIetfQosParamSetTosAndMask
for further details.
The IP TOS octet as originally defined in RFC 791
has been superseded by the 6-bit Differentiated
Services Field (DSField, RFC 3260) and the 2-bit
Explicit Congestion Notification Field (ECN field,
RFC 3168). Network operators SHOULD avoid
specifying values of docsIetfQosParamSetTosAndMask
and docsIetfQosParamSetTosORMask that would result
in the modification of the ECN bits."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetMaxLatency.setDescription(
"""Specifies the maximum latency between the
reception of a packet by the CMTS on its NSI
and the forwarding of the packet to the RF
interface. A value of 0 signifies no maximum
latency is enforced. This object only applies to
downstream Service Flows.
If the referenced parameter is not present in the
corresponding downstream DOCSIS QOS Parameter Set,
the default value is 0. This parameter is
not applicable to upstream DOCSIS QOS Parameter
Sets, and its value is reported as 0 in this case."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetType.setDescription(
"""Defines the type of the QOS parameter set defined
by this row. active(1) indicates the Active QOS
parameter set, describing the service currently
being provided by the DOCSIS MAC domain to the
Service Flow. admitted(2) indicates the Admitted
QOS Parameter Set, describing services reserved by
the DOCSIS MAC domain for use by the service
flow. provisioned (3) describes the QOS Parameter
Set defined in the DOCSIS CM Configuration file for
the Service Flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetRequestPolicyOct.setDescription(
"""Specifies which transmit interval opportunities
the CM omits for upstream transmission requests and
packet transmissions. This object takes its
default value for downstream Service Flows.
Unless otherwise indicated, a bit value of 1 means
that a CM must not use that opportunity for
upstream transmission.
With bit 0 as the least significant bit of the
least significant (4th) octet, and bit numbers
increasing with significance, the bits are
defined as follows:
broadcastReqOpp(0):
all CMs broadcast request opportunities
priorityReqMulticastReq(1):
priority request multicast request
opportunities
reqDataForReq(2):
request/data opportunities for requests
reqDataForData(3):
request/data opportunities for data
piggybackReqWithData(4):
piggyback requests with data
concatenateData(5):
concatenate data
fragmentData(6):
fragment data
suppresspayloadheaders(7):
suppress payload headers
dropPktsExceedUGSize(8):
A value of 1 means that the Service Flow must
drop packets that do not fit in the Unsolicited
Grant size.
If the referenced parameter is not present in
a QOS Parameter Set, the value of this object is
reported as '00000000'H."""
)
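# Illustrative sketch, not part of the RFC 4323 MIB definitions: since bit 0
# is the least significant bit of the last of the four octets, testing a
# Request/Transmission policy bit amounts to reading the octet string as a
# big-endian integer and shifting.  The helper and the sample value are
# assumptions made for illustration only.
def _request_policy_bit(policy_octets, bit):
    value = int.from_bytes(policy_octets, 'big')
    return bool((value >> bit) & 1)

# '00000130'H sets piggybackReqWithData(4), concatenateData(5), and
# dropPktsExceedUGSize(8)
_policy = bytes.fromhex('00000130')
assert _request_policy_bit(_policy, 4) and _request_policy_bit(_policy, 5)
assert _request_policy_bit(_policy, 8) and not _request_policy_bit(_policy, 0)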
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetBitMap.setDescription(
"""This object indicates the set of QOS Parameter
Set parameters actually signaled in the
DOCSIS registration or dynamic service request
message that created or modified the QOS Parameter
Set. A bit is set to 1 when the parameter described
by the indicated reference section is present
in the original request.
Note that when Service Class names are expanded,
the registration or dynamic response message may
contain parameters as expanded by the CMTS based
on a stored service class. These expanded
parameters are not indicated by a 1 bit in this
object.
Note that even though some QOS Parameter Set
parameters may not be signaled in a message
(so that the parameter's bit in this object is 0),
the DOCSIS specification requires that default
values be used. These default values are reported
as the corresponding object's value in the row.
Note that BITS objects are encoded most
significant bit first. For example, if bits
1 and 16 are set, the value of this object
is the octet string '400080'H."""
)
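# Illustrative sketch, not part of the RFC 4323 MIB definitions: the inverse
# of the MSB-first BITS encoding, recovering the set bit positions from a
# reported octet string.  Decoding the '400080'H example above yields bits 1
# and 16.  The helper name is an assumption made for illustration only.
def _octets_to_bits(octets):
    return {i * 8 + j for i, octet in enumerate(octets)
            for j in range(8) if octet & (0x80 >> j)}

assert _octets_to_bits(bytes.fromhex('400080')) == {1, 16}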
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowTable.setDescription(
"""This table describes the set of DOCSIS-QOS
Service Flows in a managed device."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowEntry.setDescription(
"""Describes a Service Flow.
An entry in the table exists for each
Service Flow ID. The ifIndex is an
ifType of docsCableMaclayer(127)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowId.setDescription(
'An index assigned to a Service Flow by CMTS.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowSID.setDescription(
"""Service Identifier (SID) assigned to an
admitted or active Service Flow. This object
reports a value of 0 if a Service ID is not
associated with the Service Flow. Only active
or admitted upstream Service Flows will have a
Service ID (SID)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowDirection.setDescription(
'The direction of the Service Flow.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowPrimary.setDescription(
"""Object reflects whether Service Flow is the primary
or a secondary Service Flow.
A primary Service Flow is the default Service Flow
for otherwise unclassified traffic and all MAC
messages."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowStatsTable.setDescription(
"""This table describes statistics associated with the
Service Flows in a managed device."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowStatsEntry.setDescription(
"""Describes a set of Service Flow statistics.
An entry in the table exists for each
Service Flow ID. The ifIndex is an
ifType of docsCableMaclayer(127)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowPkts.setDescription(
"""For outgoing Service Flows, this object counts the
number of Packet Data PDUs forwarded to this
Service Flow. For incoming upstream CMTS service
flows, this object counts the number of Packet
Data PDUs actually received on the Service Flow
identified by the SID for which the packet was
scheduled. CMs not classifying downstream packets
may report this object's value as 0 for downstream
Service Flows. This object does not count
MAC-specific management messages.
Particularly for UGS flows, packets sent on the
primary Service Flow in violation of the UGS grant
size should be counted only by the instance of this
object that is associated with the primary service
flow.
Unclassified upstream user data packets (i.e., non-
MAC-management) forwarded to the primary upstream
Service Flow should be counted by the instance of
this object that is associated with the primary
service flow.
This object does include packets counted by
docsIetfQosServiceFlowPolicedDelayPkts, but does not
include packets counted by
docsIetfQosServiceFlowPolicedDropPkts
and docsIetfQosServiceFlowPHSUnknowns.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowOctets.setDescription(
"""The number of octets from the byte after the MAC
header HCS to the end of the CRC for all packets
counted in the docsIetfQosServiceFlowPkts object for
this row. Note that this counts the octets after
payload header suppression and before payload
header expansion have been applied.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowTimeCreated.setDescription(
"""The value of sysUpTime when the service flow
was created.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowTimeActive.setDescription(
"""The number of seconds that the service flow
has been active.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowPHSUnknowns.setDescription(
"""For incoming upstream CMTS service flows, this
object counts the number of packets received
with an unknown payload header suppression index.
The service flow is identified by the SID for which
the packet was scheduled.
On a CM, only this object's instance for the primary
downstream service flow counts packets received with
an unknown payload header suppression index. All
other downstream service flows on the CM report this
object's value as 0.
All outgoing service flows report this object's
value as 0.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowPolicedDropPkts.setDescription(
"""For outgoing service flows, this object counts the
number of Packet Data PDUs classified to this
service flow dropped due to:
(1) implementation-dependent excessive delay
while enforcing the Maximum Sustained
Traffic Rate; or
(2) UGS packets dropped due to exceeding the
Unsolicited Grant Size with a
Request/Transmission policy that requires
such packets to be dropped.
Classified packets dropped due to other reasons
must be counted in ifOutDiscards for the interface
of this service flow. This object reports 0 for
incoming service flows.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowPolicedDelayPkts.setDescription(
"""This object counts only outgoing packets delayed in
order to maintain the Maximum Sustained Traffic
Rate. This object will always report a value of 0
for UGS flows because the Maximum Sustained Traffic
Rate does not apply. This object is 0 for incoming
service flows.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosUpstreamStatsTable.setDescription(
"""This table describes statistics associated with
upstream service flows. All counted frames must
be received without a Frame Check Sequence (FCS)
error."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosUpstreamStatsEntry.setDescription(
"""Describes a set of upstream service flow
statistics. An entry in the table exists for each
upstream Service Flow in a managed device.
The ifIndex is an ifType of
docsCableMaclayer(127)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosSID.setDescription(
"""Identifies a service ID for an admitted or active
upstream service flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosUpstreamFragments.setDescription(
"""The number of fragmentation headers received on an
upstream service flow, regardless of whether
the fragment was correctly reassembled into a
valid packet.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosUpstreamFragDiscards.setDescription(
"""The number of upstream fragments discarded and not
assembled into a valid upstream packet.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosUpstreamConcatBursts.setDescription(
"""The number of concatenation headers received on an
upstream service flow.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicServiceStatsTable.setDescription(
"""This table describes statistics associated with the
Dynamic Service Flows in a managed device."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicServiceStatsEntry.setDescription(
"""Describes a set of dynamic service flow statistics.
Two entries exist for each DOCSIS MAC layer
interface for the upstream and downstream
direction. On the CMTS, the downstream direction
row indicates messages transmitted or transactions
originated by the CMTS. The upstream direction row
indicates messages received or transactions
originated by the CM. On the CM, the downstream
direction row indicates messages received or
transactions originated by the CMTS. The upstream
direction row indicates messages transmitted by
the CM or transactions originated by the CM.
The ifIndex is an ifType of
docsCableMaclayer(127)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
    docsIetfQosIfDirection.setDescription('The direction of the interface.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSAReqs.setDescription(
"""The number of Dynamic Service Addition Requests,
including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSARsps.setDescription(
"""The number of Dynamic Service Addition Responses,
including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSAAcks.setDescription(
"""The number of Dynamic Service Addition
Acknowledgements, including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSCReqs.setDescription(
"""The number of Dynamic Service Change Requests,
including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSCRsps.setDescription(
"""The number of Dynamic Service Change Responses,
including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSCAcks.setDescription(
"""The number of Dynamic Service Change
Acknowledgements, including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSDReqs.setDescription(
"""The number of Dynamic Service Delete Requests,
including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDSDRsps.setDescription(
"""The number of Dynamic Service Delete Responses,
including retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicAdds.setDescription(
"""The number of successful Dynamic Service Addition
transactions.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicAddFails.setDescription(
"""The number of failed Dynamic Service Addition
transactions.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicChanges.setDescription(
"""The number of successful Dynamic Service Change
transactions.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicChangeFails.setDescription(
"""The number of failed Dynamic Service Change
transactions.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicDeletes.setDescription(
"""The number of successful Dynamic Service Delete
transactions.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDynamicDeleteFails.setDescription(
"""The number of failed Dynamic Service Delete
transactions.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDCCReqs.setDescription(
"""The number of Dynamic Channel Change Request
messages traversing an interface. This count
is nonzero only on downstream direction rows.
This count should include the number of retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex
that indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDCCRsps.setDescription(
"""The number of Dynamic Channel Change Response
messages traversing an interface. This count is
nonzero only on upstream direction rows. This count
should include the number of retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDCCAcks.setDescription(
"""The number of Dynamic Channel Change Acknowledgement
messages traversing an interface. This count
is nonzero only on downstream direction rows.
This count should include the number of retries.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDCCs.setDescription(
"""The number of successful Dynamic Channel Change
transactions. This count is nonzero only on
downstream direction rows.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosDCCFails.setDescription(
"""The number of failed Dynamic Channel Change
transactions. This count is nonzero only on
downstream direction rows.
This counter's last discontinuity is the
ifCounterDiscontinuityTime for the same ifIndex that
indexes this object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogTable.setDescription(
"""This table contains a log of the disconnected
Service Flows in a managed device."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogEntry.setDescription(
"""The information regarding a single disconnected
service flow.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogIndex.setDescription(
'Unique index for a logged service flow.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogIfIndex.setDescription(
"""The ifIndex of ifType docsCableMaclayer(127)
on the CMTS where the service flow was present."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogSFID.setDescription(
'The index assigned to the service flow by the CMTS.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogCmMac.setDescription(
"""The MAC address for the cable modem associated with
the service flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogPkts.setDescription(
"""The number of packets counted on this service flow
after payload header suppression."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogOctets.setDescription(
"""The number of octets counted on this service flow
after payload header suppression."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogTimeDeleted.setDescription(
"""The value of sysUpTime when the service flow
was deleted.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogTimeCreated.setDescription(
"""The value of sysUpTime when the service flow
was created.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogTimeActive.setDescription(
'The total time that the service flow was active.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogDirection.setDescription(
"""The value of docsIetfQosServiceFlowDirection
for the service flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogPrimary.setDescription(
"""The value of docsIetfQosServiceFlowPrimary for the
service flow.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogServiceClassName.setDescription(
"""The value of docsIetfQosParamSetServiceClassName for
the provisioned QOS Parameter Set of the
service flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogPolicedDropPkts.setDescription(
"""The final value of
docsIetfQosServiceFlowPolicedDropPkts for the
service flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription(
"""The final value of
docsIetfQosServiceFlowPolicedDelayPkts for the
service flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceFlowLogControl.setDescription(
"""Setting this object to the value destroy(6) removes
this entry from the table.
Reading this object returns the value active(1)."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassTable.setDescription(
"""This table describes the set of DOCSIS-QOS
Service Classes in a CMTS."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassEntry.setDescription(
"""A provisioned service class on a CMTS.
Each entry defines a template for certain
DOCSIS QOS Parameter Set values. When a CM
creates or modifies an Admitted QOS Parameter Set
for a Service Flow, it may reference a Service Class
Name instead of providing explicit QOS Parameter
Set values. In this case, the CMTS populates
the QOS Parameter Set with the applicable
corresponding values from the named Service Class.
Subsequent changes to a Service Class row do not
affect the QOS Parameter Set values of any service
flows already admitted.
A service class template applies to only
a single direction, as indicated in the
docsIetfQosServiceClassDirection object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassName.setDescription(
"""Service Class Name. DOCSIS specifies that the
maximum size is 16 ASCII characters including
a terminating zero. The terminating zero is not
represented in this SnmpAdminString syntax object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassStatus.setDescription(
"""Used to create or delete rows in this table.
There is no restriction on the ability to change
values in this row while the row is active.
Inactive rows need not be timed out."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPriority.setDescription(
'Template for docsIetfQosParamSetPriority.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassMaxTrafficRate.setDescription(
'Template for docsIetfQosParamSetMaxTrafficRate.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassMaxTrafficBurst.setDescription(
'Template for docsIetfQosParamSetMaxTrafficBurst.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassMinReservedRate.setDescription(
        'Template for docsIetfQosParamSetMinReservedRate.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassMinReservedPkt.setDescription(
'Template for docsIetfQosParamSetMinReservedPkt.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassMaxConcatBurst.setDescription(
'Template for docsIetfQosParamSetMaxConcatBurst.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassNomPollInterval.setDescription(
'Template for docsIetfQosParamSetNomPollInterval.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassTolPollJitter.setDescription(
'Template for docsIetfQosParamSetTolPollJitter.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassUnsolicitGrantSize.setDescription(
'Template for docsIetfQosParamSetUnsolicitGrantSize.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassNomGrantInterval.setDescription(
'Template for docsIetfQosParamSetNomGrantInterval.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassTolGrantJitter.setDescription(
'Template for docsIetfQosParamSetTolGrantJitter.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassGrantsPerInterval.setDescription(
'Template for docsIetfQosParamSetGrantsPerInterval.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassMaxLatency.setDescription(
        'Template for docsIetfQosParamSetMaxLatency.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassActiveTimeout.setDescription(
'Template for docsIetfQosParamSetActiveTimeout.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassAdmittedTimeout.setDescription(
'Template for docsIetfQosParamSetAdmittedTimeout.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassSchedulingType.setDescription(
'Template for docsIetfQosParamSetSchedulingType.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassRequestPolicy.setDescription(
'Template for docsIetfQosParamSetRequestPolicyOct.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassTosAndMask.setDescription(
"""Template for docsIetfQosParamSetTosAndMask.
The IP TOS octet as originally defined in RFC 791
has been superseded by the 6-bit Differentiated
Services Field (DSField, RFC 3260) and the 2-bit
Explicit Congestion Notification Field (ECN field,
RFC 3168). Network operators SHOULD avoid
specifying values of
docsIetfQosServiceClassTosAndMask and
docsIetfQosServiceClassTosOrMask that would result
in the modification of the ECN bits.
In particular, operators should not use values of
docsIetfQosServiceClassTosAndMask that have either
of the least-significant two bits set to 0.
Similarly, operators should not use values of
docsIetfQosServiceClassTosOrMask that have either
of the least-significant two bits set to 1."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassTosOrMask.setDescription(
"""Template for docsIetfQosParamSetTosOrMask.
The IP TOS octet as originally defined in RFC 791
has been superseded by the 6-bit Differentiated
Services Field (DSField, RFC 3260) and the 2-bit
Explicit Congestion Notification Field (ECN field,
RFC 3168). Network operators SHOULD avoid
specifying values of
docsIetfQosServiceClassTosAndMask and
docsIetfQosServiceClassTosOrMask that would result
in the modification of the ECN bits.
In particular, operators should not use values of
docsIetfQosServiceClassTosAndMask that have either
of the least-significant two bits set to 0.
Similarly, operators should not use values of
docsIetfQosServiceClassTosOrMask that have either
of the least-significant two bits set to 1."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassDirection.setDescription(
"""Specifies whether the service class template
applies to upstream or downstream service flows."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassStorageType.setDescription(
"""This object defines whether this row is kept in
volatile storage and lost upon reboot or whether
it is backed up by non-volatile or permanent
storage. 'permanent' entries need not allow
writable access to any object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassDSCPOverwrite.setDescription(
"""This object allows the overwrite of the DSCP
field per RFC 3260.
If this object is -1, then the corresponding entry's
docsIetfQosServiceClassTosAndMask value MUST be
'FF'H and docsIetfQosServiceClassTosOrMask MUST be
'00'H. Otherwise, this object is in the range of
0..63, and the corresponding entry's
docsIetfQosServiceClassTosAndMask value MUST be
'03'H and the docsIetfQosServiceClassTosOrMask MUST
be this object's value shifted left by two bit
positions."""
)
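# Illustrative sketch, not part of the RFC 4323 MIB definitions: deriving the
# TOS AND/OR masks implied by a docsIetfQosServiceClassDSCPOverwrite value,
# following the rules in the description above.  The helper name is an
# assumption made for illustration only.
def _dscp_overwrite_masks(dscp_overwrite):
    if dscp_overwrite == -1:
        return 0xFF, 0x00                     # leave the TOS byte untouched
    if not 0 <= dscp_overwrite <= 63:
        raise ValueError('DSCP overwrite must be -1 or 0..63')
    return 0x03, dscp_overwrite << 2          # keep ECN bits, set the DSCP

assert _dscp_overwrite_masks(-1) == (0xFF, 0x00)
assert _dscp_overwrite_masks(46) == (0x03, 0xB8)   # EF (46) -> OR mask 'B8'H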
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPolicyTable.setDescription(
"""This table describes the set of DOCSIS-QOS
Service Class Policies.
This table is an adjunct to the
docsDevFilterPolicy table. Entries in the
docsDevFilterPolicy table can point to
specific rows in this table.
This table permits mapping a packet to a service
class name of an active service flow so long as
a classifier does not exist at a higher
priority."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPolicyEntry.setDescription(
'A service class name policy entry.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPolicyIndex.setDescription(
"""Index value to identify an entry in
this table uniquely.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPolicyName.setDescription(
"""Service Class Name to identify the name of the
service class flow to which the packet should be
directed."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPolicyRulePriority.setDescription(
"""Service Class Policy rule priority for the
entry.""")
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPolicyStatus.setDescription(
"""Used to create or delete rows in this table.
This object should not be deleted if it is
referenced by an entry in docsDevFilterPolicy.
The reference should be deleted first.
There is no restriction on the ability
to change values in this row while the row is
active. Inactive rows need not be timed out."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassPolicyStorageType.setDescription(
"""This object defines whether this row is kept in
volatile storage and lost upon reboot or whether
it is backed up by non-volatile or permanent
storage. 'permanent' entries need not allow
writable access to any object."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPHSTable.setDescription(
"""This table describes the set of payload header
suppression entries."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPHSEntry.setDescription(
"""A payload header suppression entry.
The ifIndex is an ifType of docsCableMaclayer(127).
The index docsIetfQosServiceFlowId selects one
service flow from the cable MAC layer interface.
The docsIetfQosPktClassId index matches an
index of the docsIetfQosPktClassTable."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPHSField.setDescription(
"""Payload header suppression field defines the
bytes of the header that must be
suppressed/restored by the sending/receiving
device.
The number of octets in this object should be
the same as the value of docsIetfQosPHSSize."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPHSMask.setDescription(
"""Payload header suppression mask defines the
bit mask that is used in combination with the
docsIetfQosPHSField. It defines which bytes in
the header must be suppressed/restored by the
sending or receiving device.
Each bit of this bit mask corresponds to a byte
in the docsIetfQosPHSField, with the least
significant bit corresponding to the first byte
of the docsIetfQosPHSField.
Each bit of the bit mask specifies whether
the corresponding byte should be suppressed
in the packet. A bit value of '1' indicates that
the byte should be suppressed by the sending
device and restored by the receiving device.
A bit value of '0' indicates that
the byte should not be suppressed by the sending
device or restored by the receiving device.
If the bit mask does not contain a bit for each
byte in the docsIetfQosPHSField, then the bit mask
is extended with bit values of '1' to be the
necessary length."""
)
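# Illustrative sketch, not part of the RFC 4323 MIB definitions: computing
# which docsIetfQosPHSField bytes a docsIetfQosPHSMask suppresses, assuming
# the least significant bit of the first mask octet governs the first field
# byte and that a short mask is padded with '1' bits as described above.  The
# helper name and sample values are assumptions made for illustration only.
def _suppressed_bytes(phs_field, phs_mask):
    suppressed = bytearray()
    for i, byte in enumerate(phs_field):
        octet, bit = divmod(i, 8)
        # a missing mask bit counts as '1' (suppress)
        masked = True if octet >= len(phs_mask) else bool(phs_mask[octet] & (1 << bit))
        if masked:
            suppressed.append(byte)
    return bytes(suppressed)

# a mask of '05'H suppresses the 1st and 3rd bytes of a 3-byte field
assert _suppressed_bytes(b'\x11\x22\x33', b'\x05') == b'\x11\x33'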
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPHSSize.setDescription(
"""Payload header suppression size specifies the
number of bytes in the header to be suppressed
and restored.
The value of this object must match the number
of bytes in the docsIetfQosPHSField."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPHSVerify.setDescription(
"""Payload header suppression verification value. If
'true', the sender must verify docsIetfQosPHSField
is the same as what is contained in the packet
to be suppressed."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosPHSIndex.setDescription(
"""Payload header suppression index uniquely
references the PHS rule for a given service flow."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosCmtsMacToSrvFlowTable.setDescription(
"""This table provides for referencing the service
flows associated with a particular cable modem.
This allows indexing into other docsIetfQos
tables that are indexed by docsIetfQosServiceFlowId
and ifIndex."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosCmtsMacToSrvFlowEntry.setDescription(
"""An entry is created by CMTS for each service flow
connected to this CMTS."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosCmtsCmMac.setDescription(
'The MAC address for the referenced CM.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosCmtsServiceFlowId.setDescription(
'An index assigned to a service flow by CMTS.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosCmtsIfIndex.setDescription(
"""The ifIndex of ifType docsCableMacLayer(127)
on the CMTS that is connected to the Cable Modem."""
)
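# Illustrative sketch, not part of the RFC 4323 MIB definitions: walking this
# table with the pysnmp high-level API to list the per-modem service-flow
# indexes.  The CMTS address and community string are placeholders, and the
# sketch assumes pysnmp.hlapi is available.
def _walk_cm_service_flows(cmts_address, community='public'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    for error_indication, error_status, _error_index, var_binds in nextCmd(
            SnmpEngine(), CommunityData(community),
            UdpTransportTarget((cmts_address, 161)), ContextData(),
            ObjectType(ObjectIdentity('DOCS-IETF-QOS-MIB',
                                      'docsIetfQosCmtsIfIndex')),
            lexicographicMode=False):
        if error_indication or error_status:
            break
        for var_bind in var_binds:
            # the instance index encodes (docsIetfQosCmtsCmMac,
            # docsIetfQosCmtsServiceFlowId)
            print(var_bind.prettyPrint())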
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosBaseGroup.setDescription(
"""Group of objects implemented in both Cable Modems and
Cable Modem Termination Systems."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosParamSetGroup.setDescription(
"""Group of objects implemented in both Cable Modems and
Cable Modem Termination Systems for QOS Parameter Sets."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosCmtsGroup.setDescription(
'Group of objects implemented only in the CMTS.')
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosSrvClassPolicyGroup.setDescription(
"""Group of objects implemented in both Cable Modems and
Cable Modem Termination Systems when supporting policy-based
service flows."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosServiceClassGroup.setDescription(
"""Group of objects implemented only in Cable Modem
Termination Systems when supporting expansion of Service
Class Names in a QOS Parameter Set."""
)
<|reserved_special_token_0|>
if mibBuilder.loadTexts:
docsIetfQosCompliance.setDescription(
"""The compliance statement for MCNS Cable Modems and
Cable Modem Termination Systems that implement DOCSIS
Service Flows."""
)
mibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', PYSNMP_MODULE_ID=docsIetfQosMIB)
mibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', DocsIetfQosBitRate=
DocsIetfQosBitRate, DocsIetfQosRfMacIfDirection=
DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=
DocsIetfQosSchedulingType)
mibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosMIB=docsIetfQosMIB,
docsIetfQosNotifications=docsIetfQosNotifications,
docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=
docsIetfQosPktClassTable, docsIetfQosPktClassEntry=
docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId,
docsIetfQosPktClassDirection=docsIetfQosPktClassDirection,
docsIetfQosPktClassPriority=docsIetfQosPktClassPriority,
docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow,
docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh,
docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask,
docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol,
docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType,
docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr,
docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask,
docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr,
docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask,
docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart,
docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd,
docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart,
docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd,
docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr,
docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask,
docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr,
docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType,
docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol,
docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow,
docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh,
docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId,
docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive,
docsIetfQosPktClassPkts=docsIetfQosPktClassPkts,
docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap,
docsIetfQosParamSetTable=docsIetfQosParamSetTable,
docsIetfQosParamSetEntry=docsIetfQosParamSetEntry,
docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName,
docsIetfQosParamSetPriority=docsIetfQosParamSetPriority,
docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate,
docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst,
docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate,
docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt,
docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout,
docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout,
docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst,
docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType,
docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval,
docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter,
docsIetfQosParamSetUnsolicitGrantSize=
docsIetfQosParamSetUnsolicitGrantSize,
docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval,
docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter,
docsIetfQosParamSetGrantsPerInterval=
docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=
docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=
docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=
docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=
docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=
docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=
docsIetfQosParamSetBitMap, docsIetfQosServiceFlowTable=
docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=
docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=
docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=
docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=
docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=
docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=
docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=
docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=
docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=
docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=
docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=
docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=
docsIetfQosServiceFlowPHSUnknowns,
docsIetfQosServiceFlowPolicedDropPkts=
docsIetfQosServiceFlowPolicedDropPkts,
docsIetfQosServiceFlowPolicedDelayPkts=
docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=
docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=
docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID,
docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments,
docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards,
docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts,
docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable,
docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry,
docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=
docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps,
docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=
docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps,
docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=
docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps,
docsIetfQosDynamicAdds=docsIetfQosDynamicAdds,
docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails,
docsIetfQosDynamicChanges=docsIetfQosDynamicChanges,
docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails,
docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes,
docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails,
docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=
docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks,
docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=
docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=
docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=
docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=
docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=
docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=
docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=
docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=
docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=
docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=
docsIetfQosServiceFlowLogTimeDeleted,
docsIetfQosServiceFlowLogTimeCreated=
docsIetfQosServiceFlowLogTimeCreated,
docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive,
docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection,
docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary,
docsIetfQosServiceFlowLogServiceClassName=
docsIetfQosServiceFlowLogServiceClassName,
docsIetfQosServiceFlowLogPolicedDropPkts=
docsIetfQosServiceFlowLogPolicedDropPkts,
docsIetfQosServiceFlowLogPolicedDelayPkts=
docsIetfQosServiceFlowLogPolicedDelayPkts,
docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl,
docsIetfQosServiceClassTable=docsIetfQosServiceClassTable,
docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry,
docsIetfQosServiceClassName=docsIetfQosServiceClassName,
docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus,
docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority,
docsIetfQosServiceClassMaxTrafficRate=
docsIetfQosServiceClassMaxTrafficRate,
docsIetfQosServiceClassMaxTrafficBurst=
docsIetfQosServiceClassMaxTrafficBurst,
docsIetfQosServiceClassMinReservedRate=
docsIetfQosServiceClassMinReservedRate,
docsIetfQosServiceClassMinReservedPkt=
docsIetfQosServiceClassMinReservedPkt,
docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst
)
mibBuilder.exportSymbols('DOCS-IETF-QOS-MIB',
docsIetfQosServiceClassNomPollInterval=
docsIetfQosServiceClassNomPollInterval,
docsIetfQosServiceClassTolPollJitter=
docsIetfQosServiceClassTolPollJitter,
docsIetfQosServiceClassUnsolicitGrantSize=
docsIetfQosServiceClassUnsolicitGrantSize,
docsIetfQosServiceClassNomGrantInterval=
docsIetfQosServiceClassNomGrantInterval,
docsIetfQosServiceClassTolGrantJitter=
docsIetfQosServiceClassTolGrantJitter,
docsIetfQosServiceClassGrantsPerInterval=
docsIetfQosServiceClassGrantsPerInterval,
docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency,
docsIetfQosServiceClassActiveTimeout=
docsIetfQosServiceClassActiveTimeout,
docsIetfQosServiceClassAdmittedTimeout=
docsIetfQosServiceClassAdmittedTimeout,
docsIetfQosServiceClassSchedulingType=
docsIetfQosServiceClassSchedulingType,
docsIetfQosServiceClassRequestPolicy=
docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask
=docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=
docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=
docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=
docsIetfQosServiceClassStorageType,
docsIetfQosServiceClassDSCPOverwrite=
docsIetfQosServiceClassDSCPOverwrite,
docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable,
docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry,
docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex,
docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName,
docsIetfQosServiceClassPolicyRulePriority=
docsIetfQosServiceClassPolicyRulePriority,
docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus,
docsIetfQosServiceClassPolicyStorageType=
docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=
docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry,
docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=
docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize,
docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=
docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=
docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=
docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=
docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=
docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=
docsIetfQosCmtsIfIndex, docsIetfQosConformance=docsIetfQosConformance,
docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=
docsIetfQosCompliances)
mibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosBaseGroup=
docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup,
docsIetfQosCmtsGroup=docsIetfQosCmtsGroup,
docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup,
docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)
mibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosCompliance=
docsIetfQosCompliance)
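# Note (descriptive comment, not generated output): the exportSymbols() calls
# above register every object defined in this module with the MIB builder
# under the 'DOCS-IETF-QOS-MIB' module name, so other generated modules and
# management code can resolve them by (module, symbol) name.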
# PySNMP SMI module. Autogenerated from smidump -f python DOCS-IETF-QOS-MIB
# by libsmi2pysnmp-0.1.3 at Thu May 22 11:57:36 2014,
# Python version sys.version_info(major=2, minor=7, micro=2, releaselevel='final', serial=0)
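#
# Usage note (descriptive comment, not part of the generated output): modules
# produced by libsmi2pysnmp are executed by the pysnmp MIB builder with
# `mibBuilder` already bound in the module namespace.  An illustrative way to
# load this module once it is on the builder's search path:
#
#   from pysnmp.smi import builder
#   mibBuilder = builder.MibBuilder()
#   mibBuilder.loadModules('DOCS-IETF-QOS-MIB')
#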
# Imports
( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint")
( DscpOrAny, ) = mibBuilder.importSymbols("DIFFSERV-DSCP-TC", "DscpOrAny")
( InterfaceIndex, ifIndex, ) = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
( InetAddress, InetAddressType, InetPortNumber, ) = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType", "InetPortNumber")
( SnmpAdminString, ) = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
( ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup")
( Bits, Counter32, Counter64, Integer32, Integer32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, mib_2, ) = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter32", "Counter64", "Integer32", "Integer32", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Unsigned32", "mib-2")
( MacAddress, RowStatus, StorageType, TextualConvention, TimeStamp, TruthValue, ) = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "RowStatus", "StorageType", "TextualConvention", "TimeStamp", "TruthValue")
# Types
class DocsIetfQosBitRate(TextualConvention, Unsigned32):
displayHint = "d"
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(2,1,)
namedValues = NamedValues(("downstream", 1), ("upstream", 2), )
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(3,1,5,6,2,4,)
namedValues = NamedValues(("undefined", 1), ("bestEffort", 2), ("nonRealTimePollingService", 3), ("realTimePollingService", 4), ("unsolictedGrantServiceWithAD", 5), ("unsolictedGrantService", 6), )
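# The textual conventions above mirror RFC 4323: DocsIetfQosBitRate is an
# unsigned rate in bits per second, DocsIetfQosRfMacIfDirection maps 1/2 to
# downstream/upstream, and DocsIetfQosSchedulingType maps the integer codes to
# the scheduling service names declared in its NamedValues (for example, 2 is
# bestEffort and 6 is unsolictedGrantService).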
# Objects
docsIetfQosMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 127)).setRevisions(("2006-01-23 00:00",))
if mibBuilder.loadTexts: docsIetfQosMIB.setOrganization("IETF IP over Cable Data Network (IPCDN)\nWorking Group")
if mibBuilder.loadTexts: docsIetfQosMIB.setContactInfo("\nCo-Author: Michael Patrick\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7563\nE-mail: [email protected]\n\nCo-Author: William Murwin\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7594\nE-mail: [email protected]\n\nIETF IPCDN Working Group\nGeneral Discussion: [email protected]\nSubscribe: http://www.ietf.org/mailman/listinfo/ipcdn\nArchive: ftp://ftp.ietf.org/ietf-mail-archive/ipcdn\nCo-chairs: Richard Woundy, [email protected]\n Jean-Francois Mule, [email protected]")
if mibBuilder.loadTexts: docsIetfQosMIB.setDescription("This is the management information for\nQuality Of Service (QOS) for DOCSIS 1.1 and 2.0.\n\n\n\nCopyright (C) The Internet Society (2006). This version of\nthis MIB module is part of RFC 4323; see the RFC itself for\nfull legal notices.")
docsIetfQosNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 0))
docsIetfQosMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 1))
docsIetfQosPktClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 1))
if mibBuilder.loadTexts: docsIetfQosPktClassTable.setDescription("This table describes the packet classification\nconfigured on the CM or CMTS.\nThe model is that a packet either received\nas input from an interface or transmitted\nfor output on an interface may be compared\nagainst an ordered list of rules pertaining to\nthe packet contents. Each rule is a row of this\ntable. A matching rule provides a Service Flow\nID to which the packet is classified.\nAll rules need to match for a packet to match\na classifier.\n\nThe objects in this row correspond to a set of\nClassifier Encoding parameters in a DOCSIS\nMAC management message. The\ndocsIetfQosPktClassBitMap indicates which\nparticular parameters were present in the\nclassifier as signaled in the DOCSIS message.\nIf the referenced parameter was not present\nin the signaled DOCSIS 1.1 and 2.0 Classifier, the\ncorresponding object in this row reports a\nvalue as specified in the DESCRIPTION section.")
docsIetfQosPktClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 1, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosPktClassId"))
if mibBuilder.loadTexts: docsIetfQosPktClassEntry.setDescription("An entry in this table provides a single packet\nclassifier rule. The index ifIndex is an ifType\nof docsCableMaclayer(127).")
docsIetfQosPktClassId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosPktClassId.setDescription("Index assigned to packet classifier entry by\nthe CMTS, which is unique per Service Flow.")
docsIetfQosPktClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 2), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDirection.setDescription("Indicates the direction to which the classifier\nis applied.")
docsIetfQosPktClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPriority.setDescription("The value specifies the order of evaluation\nof the classifiers.\n\nThe higher the value, the higher the priority.\nThe value of 0 is used as default in\nprovisioned Service Flows Classifiers.\nThe default value of 64 is used for dynamic\nService Flow Classifiers.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the default\nvalue as defined above.")
docsIetfQosPktClassIpTosLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosLow.setDescription("The low value of a range of TOS byte values.\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet, as originally defined in RFC 791,\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as per the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosHigh.setDescription("The 8-bit high value of a range of TOS byte\nvalues.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the\nvalue of 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as defined by the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosMask.setDescription("The mask value is bitwise ANDed with TOS byte\nin an IP packet, and this value is used for\nrange checking of TosLow and TosHigh.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet per the DOCSIS Specification for packet\nclassification.")
docsIetfQosPktClassIpProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 258))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpProtocol.setDescription("This object indicates the value of the IP\nProtocol field required for IP packets to match\nthis rule.\n\n\n\n\nThe value 256 matches traffic with any IP Protocol\nvalue. The value 257 by convention matches both TCP\nand UDP.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 258.")
docsIetfQosPktClassInetAddressType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 8), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetAddressType.setDescription("The type of the Internet address for\ndocsIetfQosPktClassInetSourceAddr,\ndocsIetfQosPktClassInetSourceMask,\ndocsIetfQosPktClassInetDestAddr, and\ndocsIetfQosPktClassInetDestMask.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\nipv4(1).")
docsIetfQosPktClassInetSourceAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 9), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceAddr.setDescription("This object specifies the value of the IP\nSource Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Source Address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 10), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceMask.setDescription("This object specifies which bits of a packet's\nIP Source Address are compared to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nsource address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosIpPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
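# Illustrative address-match check implied by the two descriptions above
# (hypothetical helper, not part of the generated module): a packet source
# address matches when, octet by octet, the packet address ANDed with the mask
# equals the classifier's source-address value, e.g.:
#
#   def _inet_matches(pkt_addr, mask, rule_addr):
#       # all arguments are equal-length sequences of octet values
#       return all((p & m) == r for p, m, r in zip(pkt_addr, mask, rule_addr))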
docsIetfQosPktClassInetDestAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 11), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestAddr.setDescription("This object specifies the value of the IP\nDestination Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Destination Address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value\nequals the docsIetfQosPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetDestMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 12), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestMask.setDescription("This object specifies which bits of a packet's\nIP Destination Address are compared to\nmatch this rule.\n\nAn IP packet matches the rule when the packet\ndestination address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value equals the\ndocsIetfQosIpPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
docsIetfQosPktClassSourcePortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 13), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassSourcePortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 14), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
docsIetfQosPktClassDestPortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 15), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP destination port numbers to\nwhich a packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassDestPortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 16), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP destination port numbers to which\na packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
docsIetfQosPktClassDestMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 17), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacAddr.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassDestMacMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 18), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacMask.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassSourceMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 19), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourceMacAddr.setDescription("An Ethernet packet matches this entry when its\nsource MAC address equals the value of\nthis object.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFFFFFF'H.")
docsIetfQosPktClassEnetProtocolType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(2,0,1,4,3,)).subtype(namedValues=NamedValues(("none", 0), ("ethertype", 1), ("dsap", 2), ("mac", 3), ("all", 4), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocolType.setDescription("This object indicates the format of the layer 3\nprotocol ID in the Ethernet packet. A value of\nnone(0) means that the rule does not use the\nlayer 3 protocol type as a matching criteria.\n\nA value of ethertype(1) means that the rule\napplies only to frames that contain an\nEtherType value. Ethertype values are contained\nin packets using the Dec-Intel-Xerox (DIX)\nencapsulation or the RFC1042 Sub-Network Access\nProtocol (SNAP) encapsulation formats.\n\nA value of dsap(2) means that the rule applies\n\n\n\nonly to frames using the IEEE802.3\nencapsulation format with a Destination Service\nAccess Point (DSAP) other\nthan 0xAA (which is reserved for SNAP).\n\nA value of mac(3) means that the rule applies\nonly to MAC management messages for MAC management\nmessages.\n\nA value of all(4) means that the rule matches\nall Ethernet packets.\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object\napplies to the embedded EtherType field within\nthe 802.1P/Q header.\n\nIf the referenced parameter is not present in a\nclassifier, this object reports the value of 0.")
docsIetfQosPktClassEnetProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocol.setDescription("If docsIetfQosEthPktClassProtocolType is none(0),\nthis object is ignored when considering whether\na packet matches the current rule.\n\nIf dosQosPktClassEnetProtocolType is ethertype(1),\nthis object gives the 16-bit value of the\nEtherType that the packet must match in order to\nmatch the rule.\n\nIf docsIetfQosPktClassEnetProtocolType is dsap(2),\nthe lower 8 bits of this object's value must match\nthe DSAP byte of the packet in order to match the\nrule.\n\nIf docsIetfQosPktClassEnetProtocolType is mac(3),\nthe lower 8 bits of this object's value represent a\nlower bound (inclusive) of MAC management message\ntype codes matched, and the upper 8 bits represent\nthe upper bound (inclusive) of matched MAC message\ntype codes. Certain message type codes are\nexcluded from matching, as specified in the\nreference.\n\n\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object applies\nto the embedded EtherType field within the 802.1P/Q\nheader.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassUserPriLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriLow.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassUserPriHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriHigh.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Qtag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\n\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 7.")
docsIetfQosPktClassVlanId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassVlanId.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header.\n\nTagged packets must have a VLAN Identifier that\nmatches the value in order to match the rule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassStateActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 25), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassStateActive.setDescription("This object indicates whether or not the classifier\nis enabled to classify packets to a Service Flow.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas true(1).")
docsIetfQosPktClassPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPkts.setDescription("This object counts the number of packets that have\nbeen classified using this entry. This\nincludes all packets delivered to a Service Flow\nmaximum rate policing function, whether or not that\nfunction drops the packets.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosPktClassBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 27), Bits().subtype(namedValues=NamedValues(("rulePriority", 0), ("activationState", 1), ("destPortStart", 10), ("destPortEnd", 11), ("destMac", 12), ("sourceMac", 13), ("ethertype", 14), ("userPri", 15), ("vlanId", 16), ("ipTos", 2), ("ipProtocol", 3), ("ipSourceAddr", 4), ("ipSourceMask", 5), ("ipDestAddr", 6), ("ipDestMask", 7), ("sourcePortStart", 8), ("sourcePortEnd", 9), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassBitMap.setDescription("This object indicates which parameter encodings\nwere actually present in the DOCSIS packet\nclassifier encoding signaled in the DOCSIS message\nthat created or modified the classifier. Note that\nDynamic Service Change messages have replace\nsemantics, so that all non-default parameters must\nbe present whether the classifier is being created\nor changed.\n\nA bit of this object is set to 1 if the parameter\nindicated by the comment was present in the\nclassifier encoding, and to 0 otherwise.\n\nNote that BITS are encoded most significant bit\nfirst, so that if, for example, bits 6 and 7 are\nset, this object is encoded as the octet string\n'030000'H.")
docsIetfQosParamSetTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 2))
if mibBuilder.loadTexts: docsIetfQosParamSetTable.setDescription("This table describes the set of DOCSIS 1.1 and 2.0\nQOS parameters defined in a managed device.\n\nThe ifIndex index specifies a DOCSIS MAC Domain.\nThe docsIetfQosServiceFlowId index specifies a\nparticular Service Flow.\nThe docsIetfQosParamSetType index indicates whether\nthe active, admitted, or provisioned QOS Parameter\nSet is being described by the row.\n\nOnly the QOS Parameter Sets of DOCSIS 1.1 and 2.0\nService Flows are represented in this table.\n\nDOCSIS 1.0 QOS service profiles are not\nrepresented in this table.\n\nEach row corresponds to a DOCSIS QOS Parameter Set\nas signaled via DOCSIS MAC management messages.\nEach object in the row corresponds to one or\npart of one DOCSIS 1.1 Service Flow Encoding.\nThe docsIetfQosParamSetBitMap object in the row\nindicates which particular parameters were signaled\nin the original registration or dynamic service\nrequest message that created the QOS Parameter Set.\n\nIn many cases, even if a QOS Parameter Set parameter\nwas not signaled, the DOCSIS specification calls\nfor a default value to be used. That default value\nis reported as the value of the corresponding object\nin this row.\n\nMany objects are not applicable, depending on\nthe Service Flow direction or upstream scheduling\ntype. The object value reported in this case\nis specified in the DESCRIPTION clause.")
docsIetfQosParamSetEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 2, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosParamSetType"))
if mibBuilder.loadTexts: docsIetfQosParamSetEntry.setDescription("A unique set of QOS parameters.")
docsIetfQosParamSetServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetServiceClassName.setDescription("Refers to the Service Class Name from which the\nparameter set values were derived.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is a zero-length string.")
docsIetfQosParamSetPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetPriority.setDescription("The relative priority of a Service Flow.\nHigher numbers indicate higher priority.\nThis priority should only be used to differentiate\n\n\n\nService Flow from identical parameter sets.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, the reported value is 0.")
docsIetfQosParamSetMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 3), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficRate.setDescription("Maximum sustained traffic rate allowed for this\nService Flow in bits/sec. Must count all MAC frame\ndata PDU from the bytes following the MAC header\nHCS to the end of the CRC. The number of bytes\nforwarded is limited during any time interval.\nThe value 0 means no maximum traffic rate is\nenforced. This object applies to both upstream and\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, it is reported as 0.")
docsIetfQosParamSetMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficBurst.setDescription("Specifies the token bucket size in bytes\nfor this parameter set. The value is calculated\nfrom the byte following the MAC header HCS to\nthe end of the CRC. This object is applied in\nconjunction with docsIetfQosParamSetMaxTrafficRate\nto calculate maximum sustained traffic rate.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort (2), nonRealTimePollingService(3),\nand realTimePollingService(4) is 3044.\n\nIf this parameter is not applicable, it is reported\nas 0.")
docsIetfQosParamSetMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 5), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedRate.setDescription("Specifies the guaranteed minimum rate in\nbits/sec for this parameter set. The value is\ncalculated from the byte following the MAC\nheader HCS to the end of the CRC. The default\nvalue of 0 means that no bandwidth is reserved.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter\nis not applicable, it is reported as 0.")
docsIetfQosParamSetMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedPkt.setDescription("Specifies an assumed minimum packet size in\nbytes for which the\ndocsIetfQosParamSetMinReservedRate will be\nprovided. The value is calculated from the byte\nfollowing the MAC header HCS to the end of the\nCRC.\n\nIf the referenced parameter is omitted from a\nDOCSIS QOS parameter set, the default value is\nCMTS implementation dependent. In this case, the\nCMTS reports the default value it is using, and the\nCM reports a value of 0. If the referenced\nparameter is not applicable to the direction or\nscheduling type of the Service Flow, both CMTS and\nCM report this object's value as 0.")
docsIetfQosParamSetActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetActiveTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain unused on an active service\nflow before CMTS signals that both active and\nadmitted parameters set are null. The default\nvalue of 0 signifies an infinite amount of time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0.")
docsIetfQosParamSetAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetAdmittedTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain in admitted state before\nresources must be released.\n\nThe value of 0 signifies an infinite amount\nof time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the\ndefault value of this object is 200.")
docsIetfQosParamSetMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxConcatBurst.setDescription("Specifies the maximum concatenated burst in\nbytes that an upstream Service Flow is allowed.\nThe value is calculated from the FC byte of the\nConcatenation MAC Header to the last CRC byte in\nof the last concatenated MAC frame, inclusive.\nThe value of 0 specifies no maximum burst.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort(2), nonRealTimePollingService(3), and\n\n\n\nrealTimePollingService(4) is 1522. If the parameter\nis not applicable, this object's value is reported\nas 0.")
docsIetfQosParamSetSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 10), DocsIetfQosSchedulingType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetSchedulingType.setDescription("Specifies the upstream scheduling service used for\nupstream Service Flow.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set of an\nupstream Service Flow, the default value of this\nobject is bestEffort(2). For QOS parameter sets of\ndownstream Service Flows, this object's value is\nreported as undefined(1).")
docsIetfQosParamSetNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetNomPollInterval.setDescription("Specifies the nominal interval in microseconds\nbetween successive unicast request\nopportunities on an upstream Service Flow.\n\nThis object applies only to upstream Service Flows\nwith DocsIetfQosSchedulingType of value\nnonRealTimePollingService(3),\nrealTimePollingService(4), and\nunsolictedGrantServiceWithAD(5). The parameter is\nmandatory for realTimePollingService(4). If the\nparameter is omitted with\nnonRealTimePollingService(3), the CMTS uses an\nimplementation-dependent value. If the parameter\nis omitted with unsolictedGrantServiceWithAD(5),\nthe CMTS uses as a default value the value of the\nNominal Grant Interval parameter. In all cases,\nthe CMTS reports the value it is using when the\nparameter is applicable. The CM reports the\nsignaled parameter value if it was signaled,\nand 0 otherwise.\n\n\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTolPollJitter.setDescription("Specifies the maximum amount of time in\nmicroseconds that the unicast request interval\nmay be delayed from the nominal periodic\nschedule on an upstream Service Flow.\n\nThis parameter is applicable only to upstream\nService Flows with a DocsIetfQosSchedulingType of\nrealTimePollingService(4) or\nunsolictedGrantServiceWithAD(5).\n\nIf the referenced parameter is applicable but not\npresent in the corresponding DOCSIS QOS Parameter\nSet, the CMTS uses an implementation-dependent\nvalue and reports the value it is using.\nThe CM reports a value of 0 in this case.\n\nIf the parameter is not applicable to the\ndirection or upstream scheduling type of the\nService Flow, both CMTS and CM report this\nobject's value as 0.")
docsIetfQosParamSetUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetUnsolicitGrantSize.setDescription("Specifies the unsolicited grant size in bytes.\nThe grant size includes the entire MAC frame\ndata PDU from the Frame Control byte to the end\nof the MAC frame.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\n\n\n\nwhen applicable. Both CMTS and CM report\nthe signaled value of the parameter in this\ncase.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 14), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetNomGrantInterval.setDescription("Specifies the nominal interval in microseconds\nbetween successive data grant opportunities\non an upstream Service Flow.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 15), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTolGrantJitter.setDescription("Specifies the maximum amount of time in\nmicroseconds that the transmission opportunities\nmay be delayed from the nominal periodic schedule.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\n\n\n\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetGrantsPerInterval.setDescription("Specifies the number of data grants per Nominal\nGrant Interval\n(docsIetfQosParamSetNomGrantInterval).\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTosAndMask.setDescription("Specifies the AND mask for the IP TOS byte for\noverwriting IP packet's TOS value. The IP packet\nTOS byte is bitwise ANDed with\ndocsIetfQosParamSetTosAndMask, and the result is\nbitwise ORed with docsIetfQosParamSetTosORMask and\nthe result is written to the IP packet TOS byte.\nA value of 'FF'H for docsIetfQosParamSetTosAndMask\nand a value of '00'H for\ndocsIetfQosParamSetTosOrMask means that the IP\nPacket TOS byte is not overwritten.\n\nThis combination is reported if the referenced\nparameter is not present in a QOS Parameter Set.\n\n\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosORMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosParamSetTosAndMask that have either of\nthe least-significant two bits set to 0. Similarly,\noperators should not use values of\ndocsIetfQosParamSetTosORMask that have either of\nthe least-significant two bits set to 1.\n\nEven though this object is only enforced by the\nCable Modem Termination System (CMTS),\nCable Modems MUST report the value as signaled in\nthe referenced parameter.")
docsIetfQosParamSetTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTosOrMask.setDescription("Specifies the OR mask for the IP TOS byte.\n\nSee the description of docsIetfQosParamSetTosAndMask\nfor further details.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosORMask that would result\nin the modification of the ECN bits.")
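# TOS overwrite rule summarized from the two descriptions above:
#   new_tos = (old_tos & docsIetfQosParamSetTosAndMask) | docsIetfQosParamSetTosOrMask
# with an AND mask of 'FF'H and an OR mask of '00'H meaning the TOS byte is
# left unchanged.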
docsIetfQosParamSetMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 19), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxLatency.setDescription("Specifies the maximum latency between the\nreception of a packet by the CMTS on its NSI\nand the forwarding of the packet to the RF\ninterface. A value of 0 signifies no maximum\nlatency is enforced. This object only applies to\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding downstream DOCSIS QOS Parameter Set,\nthe default value is 0. This parameter is\nnot applicable to upstream DOCSIS QOS Parameter\nSets, and its value is reported as 0 in this case.")
docsIetfQosParamSetType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(1,3,2,)).subtype(namedValues=NamedValues(("active", 1), ("admitted", 2), ("provisioned", 3), ))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosParamSetType.setDescription("Defines the type of the QOS parameter set defined\nby this row. active(1) indicates the Active QOS\nparameter set, describing the service currently\nbeing provided by the DOCSIS MAC domain to the\nService Flow. admitted(2) indicates the Admitted\nQOS Parameter Set, describing services reserved by\nthe DOCSIS MAC domain for use by the service\nflow. provisioned (3) describes the QOS Parameter\nSet defined in the DOCSIS CM Configuration file for\nthe Service Flow.")
docsIetfQosParamSetRequestPolicyOct = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetRequestPolicyOct.setDescription("Specifies which transmit interval opportunities\nthe CM omits for upstream transmission requests and\npacket transmissions. This object takes its\ndefault value for downstream Service Flows.\n\nUnless otherwise indicated, a bit value of 1 means\nthat a CM must not use that opportunity for\nupstream transmission.\n\nIf bit 0 is the least significant bit of the\nleast significant (4th) octet, and if bit number\nis increased with significance, the bit definitions\nare defined as follows:\n\nbroadcastReqOpp(0):\n all CMs broadcast request opportunities\n\npriorityReqMulticastReq(1):\n priority request multicast request\n opportunities\n\nreqDataForReq(2):\n request/data opportunities for requests\n\nreqDataForData(3):\n request/data opportunities for data\n\npiggybackReqWithData(4):\n piggyback requests with data\n\nconcatenateData(5):\n concatenate data\n\nfragmentData(6):\n fragment data\n\nsuppresspayloadheaders(7):\n suppress payload headers\n\n\n\n\ndropPktsExceedUGSize(8):\n A value of 1 means that the Service Flow must\n drop packets that do not fit in the Unsolicited\n Grant size.\n\nIf the referenced parameter is not present in\na QOS Parameter Set, the value of this object is\nreported as '00000000'H.")
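# Bit numbering note for docsIetfQosParamSetRequestPolicyOct, as described
# above: bit 0 is the least significant bit of the last (4th) octet and bit
# numbers grow with significance, so, for example, dropPktsExceedUGSize(8) is
# the least significant bit of the 3rd octet.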
docsIetfQosParamSetBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 22), Bits().subtype(namedValues=NamedValues(("trafficPriority", 0), ("maxTrafficRate", 1), ("nomPollInterval", 10), ("tolPollJitter", 11), ("unsolicitGrantSize", 12), ("nomGrantInterval", 13), ("tolGrantJitter", 14), ("grantsPerInterval", 15), ("tosOverwrite", 16), ("maxLatency", 17), ("maxTrafficBurst", 2), ("minReservedRate", 3), ("minReservedPkt", 4), ("activeTimeout", 5), ("admittedTimeout", 6), ("maxConcatBurst", 7), ("schedulingType", 8), ("requestPolicy", 9), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetBitMap.setDescription("This object indicates the set of QOS Parameter\nSet parameters actually signaled in the\nDOCSIS registration or dynamic service request\nmessage that created or modified the QOS Parameter\nSet. A bit is set to 1 when the parameter described\nby the indicated reference section is present\nin the original request.\n\nNote that when Service Class names are expanded,\nthe registration or dynamic response message may\ncontain parameters as expanded by the CMTS based\n\n\n\non a stored service class. These expanded\nparameters are not indicated by a 1 bit in this\nobject.\n\nNote that even though some QOS Parameter Set\nparameters may not be signaled in a message\n(so that the paramater's bit in this object is 0),\nthe DOCSIS specification requires that default\nvalues be used. These default values are reported\nas the corresponding object's value in the row.\n\nNote that BITS objects are encoded most\nsignificant bit first. For example, if bits\n1 and 16 are set, the value of this object\nis the octet string '400080'H.")
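# BITS ordering note for docsIetfQosParamSetBitMap: bit 0 is the most
# significant bit of the first octet, so setting bits 1 and 16 yields the
# octets 40 00 80 ('400080'H), matching the example in the description above.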
docsIetfQosServiceFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 3))
if mibBuilder.loadTexts: docsIetfQosServiceFlowTable.setDescription("This table describes the set of DOCSIS-QOS\nService Flows in a managed device.")
docsIetfQosServiceFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 3, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowEntry.setDescription("Describes a Service Flow.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).")
docsIetfQosServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceFlowId.setDescription("An index assigned to a Service Flow by CMTS.")
docsIetfQosServiceFlowSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16383))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowSID.setDescription("Service Identifier (SID) assigned to an\nadmitted or active Service Flow. This object\nreports a value of 0 if a Service ID is not\nassociated with the Service Flow. Only active\nor admitted upstream Service Flows will have a\nService ID (SID).")
docsIetfQosServiceFlowDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 3), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowDirection.setDescription("The direction of the Service Flow.")
docsIetfQosServiceFlowPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPrimary.setDescription("Object reflects whether Service Flow is the primary\nor a secondary Service Flow.\n\nA primary Service Flow is the default Service Flow\nfor otherwise unclassified traffic and all MAC\nmessages.")
docsIetfQosServiceFlowStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 4))
if mibBuilder.loadTexts: docsIetfQosServiceFlowStatsTable.setDescription("This table describes statistics associated with the\nService Flows in a managed device.")
docsIetfQosServiceFlowStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 4, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowStatsEntry.setDescription("Describes a set of Service Flow statistics.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).")
docsIetfQosServiceFlowPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPkts.setDescription("For outgoing Service Flows, this object counts the\nnumber of Packet Data PDUs forwarded to this\nService Flow. For incoming upstream CMTS service\nflows, this object counts the number of Packet\nData PDUs actually received on the Service Flow\nidentified by the SID for which the packet was\nscheduled. CMs not classifying downstream packets\nmay report this object's value as 0 for downstream\nService Flows. This object does not count\nMAC-specific management messages.\n\nParticularly for UGS flows, packets sent on the\nprimary Service Flow in violation of the UGS grant\nsize should be counted only by the instance of this\nobject that is associated with the primary service\n\n\n\nflow.\n\nUnclassified upstream user data packets (i.e., non-\nMAC-management) forwarded to the primary upstream\nService Flow should be counted by the instance of\nthis object that is associated with the primary\nservice flow.\n\nThis object does include packets counted by\ndocsIetfQosServiceFlowPolicedDelayPkts, but does not\ninclude packets counted by\ndocsIetfQosServiceFlowPolicedDropPkts\nand docsIetfQosServiceFlowPHSUnknowns.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowOctets.setDescription("The number of octets from the byte after the MAC\nheader HCS to the end of the CRC for all packets\ncounted in the docsIetfQosServiceFlowPkts object for\nthis row. Note that this counts the octets after\npayload header suppression and before payload\nheader expansion have been applied.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowTimeCreated.setDescription("The value of sysUpTime when the service flow\nwas created.")
docsIetfQosServiceFlowTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowTimeActive.setDescription("The number of seconds that the service flow\nhas been active.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPHSUnknowns = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPHSUnknowns.setDescription("For incoming upstream CMTS service flows, this\nobject counts the number of packets received\nwith an unknown payload header suppression index.\nThe service flow is identified by the SID for which\nthe packet was scheduled.\n\nOn a CM, only this object's instance for the primary\ndownstream service flow counts packets received with\nan unknown payload header suppression index. All\nother downstream service flows on CM report this\nobjects value as 0.\n\nAll outgoing service flows report this object's\nvalue as 0.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDropPkts.setDescription("For outgoing service flows, this object counts the\nnumber of Packet Data PDUs classified to this\nservice flow dropped due to:\n (1) implementation-dependent excessive delay\n while enforcing the Maximum Sustained\n Traffic Rate; or\n (2) UGS packets dropped due to exceeding the\n Unsolicited Grant Size with a\n Request/Transmission policy that requires\n such packets to be dropped.\n\nClassified packets dropped due to other reasons\n\n\n\nmust be counted in ifOutDiscards for the interface\nof this service flow. This object reports 0 for\nincoming service flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDelayPkts.setDescription("This object counts only outgoing packets delayed in\norder to maintain the Maximum Sustained Traffic\nRate. This object will always report a value of 0\nfor UGS flows because the Maximum Sustained Traffic\nRate does not apply. This object is 0 for incoming\nservice flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 5))
if mibBuilder.loadTexts: docsIetfQosUpstreamStatsTable.setDescription("This table describes statistics associated with\nupstream service flows. All counted frames must\nbe received without a Frame Check Sequence (FCS)\nerror.")
docsIetfQosUpstreamStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 5, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosSID"))
if mibBuilder.loadTexts: docsIetfQosUpstreamStatsEntry.setDescription("Describes a set of upstream service flow\nstatistics. An entry in the table exists for each\nupstream Service Flow in a managed device.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).")
docsIetfQosSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 16383))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosSID.setDescription("Identifies a service ID for an admitted or active\nupstream service flow.")
docsIetfQosUpstreamFragments = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamFragments.setDescription("The number of fragmentation headers received on an\nupstream service flow, regardless of whether\nthe fragment was correctly reassembled into a\nvalid packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamFragDiscards = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamFragDiscards.setDescription("The number of upstream fragments discarded and not\nassembled into a valid upstream packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamConcatBursts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamConcatBursts.setDescription("The number of concatenation headers received on an\nupstream service flow.\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicServiceStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 6))
if mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsTable.setDescription("This table describes statistics associated with the\nDynamic Service Flows in a managed device.")
docsIetfQosDynamicServiceStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 6, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosIfDirection"))
if mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsEntry.setDescription("Describes a set of dynamic service flow statistics.\nTwo entries exist for each DOCSIS MAC layer\ninterface for the upstream and downstream\ndirection. On the CMTS, the downstream direction\nrow indicates messages transmitted or transactions\noriginated by the CMTS. The upstream direction row\nindicates messages received or transaction\noriginated by the CM. On the CM, the downstream\ndirection row indicates messages received or\ntransactions originated by the CMTS. The upstream\ndirection row indicates messages transmitted by\nthe CM or transactions originated by the CM.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).")
docsIetfQosIfDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 1), DocsIetfQosRfMacIfDirection()).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosIfDirection.setDescription("The direction of interface.")
docsIetfQosDSAReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSAReqs.setDescription("The number of Dynamic Service Addition Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSARsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSARsps.setDescription("The number of Dynamic Service Addition Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.")
docsIetfQosDSAAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSAAcks.setDescription("The number of Dynamic Service Addition\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCReqs.setDescription("The number of Dynamic Service Change Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCRsps.setDescription("The number of Dynamic Service Change Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCAcks.setDescription("The number of Dynamic Service Change\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.")
docsIetfQosDSDReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSDReqs.setDescription("The number of Dynamic Service Delete Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSDRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSDRsps.setDescription("The number of Dynamic Service Delete Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicAdds = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicAdds.setDescription("The number of successful Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicAddFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicAddFails.setDescription("The number of failed Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.")
docsIetfQosDynamicChanges = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicChanges.setDescription("The number of successful Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicChangeFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicChangeFails.setDescription("The number of failed Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicDeletes = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicDeletes.setDescription("The number of successful Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicDeleteFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicDeleteFails.setDescription("The number of failed Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.")
docsIetfQosDCCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCReqs.setDescription("The number of Dynamic Channel Change Request\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex\nthat indexes this object.")
docsIetfQosDCCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCRsps.setDescription("The number of Dynamic Channel Change Response\nmessages traversing an interface. This count is\nnonzero only on upstream direction rows. This count\nshould include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCAcks.setDescription("The number of Dynamic Channel Change Acknowledgement\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCs.setDescription("The number of successful Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCFails.setDescription("The number of failed Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowLogTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 7))
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTable.setDescription("This table contains a log of the disconnected\nService Flows in a managed device.")
docsIetfQosServiceFlowLogEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 7, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogIndex"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogEntry.setDescription("The information regarding a single disconnected\nservice flow.")
docsIetfQosServiceFlowLogIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogIndex.setDescription("Unique index for a logged service flow.")
docsIetfQosServiceFlowLogIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 2), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogIfIndex.setDescription("The ifIndex of ifType docsCableMaclayer(127)\non the CMTS where the service flow was present.")
docsIetfQosServiceFlowLogSFID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogSFID.setDescription("The index assigned to the service flow by the CMTS.")
docsIetfQosServiceFlowLogCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogCmMac.setDescription("The MAC address for the cable modem associated with\nthe service flow.")
docsIetfQosServiceFlowLogPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPkts.setDescription("The number of packets counted on this service flow\nafter payload header suppression.")
docsIetfQosServiceFlowLogOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogOctets.setDescription("The number of octets counted on this service flow\nafter payload header suppression.")
docsIetfQosServiceFlowLogTimeDeleted = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 7), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeDeleted.setDescription("The value of sysUpTime when the service flow\nwas deleted.")
docsIetfQosServiceFlowLogTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 8), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeCreated.setDescription("The value of sysUpTime when the service flow\nwas created.")
docsIetfQosServiceFlowLogTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeActive.setDescription("The total time that the service flow was active.")
docsIetfQosServiceFlowLogDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 10), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogDirection.setDescription("The value of docsIetfQosServiceFlowDirection\nfor the service flow.")
docsIetfQosServiceFlowLogPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPrimary.setDescription("The value of docsIetfQosServiceFlowPrimary for the\nservice flow.")
docsIetfQosServiceFlowLogServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogServiceClassName.setDescription("The value of docsIetfQosParamSetServiceClassName for\nthe provisioned QOS Parameter Set of the\nservice flow.")
docsIetfQosServiceFlowLogPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDropPkts.setDescription("The final value of\ndocsIetfQosServiceFlowPolicedDropPkts for the\nservice flow.")
docsIetfQosServiceFlowLogPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription("The final value of\ndocsIetfQosServiceFlowPolicedDelayPkts for the\nservice flow.")
docsIetfQosServiceFlowLogControl = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 15), Integer().subtype(subtypeSpec=SingleValueConstraint(1,6,)).subtype(namedValues=NamedValues(("active", 1), ("destroy", 6), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogControl.setDescription("Setting this object to the value destroy(6) removes\nthis entry from the table.\n\nReading this object returns the value active(1).")
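# Illustrative only, not part of the generated MIB module: a sketch of how a
# manager could use docsIetfQosServiceFlowLogControl to prune one log entry by
# writing destroy(6), using pysnmp's high-level API. The helper name, target
# host, community string, and log index are placeholders of ours; the compiled
# DOCS-IETF-QOS-MIB must be resolvable on the manager side.
def _exampleDestroyServiceFlowLogEntry(target, community, logIndex):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, setCmd)
    # One SET of docsIetfQosServiceFlowLogControl.<logIndex> = destroy(6);
    # the value's SMI type is resolved from the MIB at request time.
    return next(setCmd(SnmpEngine(), CommunityData(community),
                       UdpTransportTarget((target, 161)), ContextData(),
                       ObjectType(ObjectIdentity('DOCS-IETF-QOS-MIB',
                                                 'docsIetfQosServiceFlowLogControl',
                                                 logIndex), 6)))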
docsIetfQosServiceClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 8))
if mibBuilder.loadTexts: docsIetfQosServiceClassTable.setDescription("This table describes the set of DOCSIS-QOS\nService Classes in a CMTS.")
docsIetfQosServiceClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 8, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassName"))
if mibBuilder.loadTexts: docsIetfQosServiceClassEntry.setDescription("A provisioned service class on a CMTS.\nEach entry defines a template for certain\nDOCSIS QOS Parameter Set values. When a CM\ncreates or modifies an Admitted QOS Parameter Set\nfor a Service Flow, it may reference a Service Class\nName instead of providing explicit QOS Parameter\nSet values. In this case, the CMTS populates\nthe QOS Parameter Set with the applicable\ncorresponding values from the named Service Class.\nSubsequent changes to a Service Class row do not\naffect the QOS Parameter Set values of any service\nflows already admitted.\n\nA service class template applies to only\na single direction, as indicated in the\ndocsIetfQosServiceClassDirection object.")
docsIetfQosServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceClassName.setDescription("Service Class Name. DOCSIS specifies that the\nmaximum size is 16 ASCII characters including\na terminating zero. The terminating zero is not\nrepresented in this SnmpAdminString syntax object.")
docsIetfQosServiceClassStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassStatus.setDescription("Used to create or delete rows in this table.\nThere is no restriction on the ability to change\nvalues in this row while the row is active.\nInactive rows need not be timed out.")
docsIetfQosServiceClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPriority.setDescription("Template for docsIetfQosParamSetPriority.")
docsIetfQosServiceClassMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 4), DocsIetfQosBitRate().clone('0')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficRate.setDescription("Template for docsIetfQosParamSetMaxTrafficRate.")
docsIetfQosServiceClassMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 5), Unsigned32().clone(3044)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficBurst.setDescription("Template for docsIetfQosParamSetMaxTrafficBurst.")
docsIetfQosServiceClassMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 6), DocsIetfQosBitRate().clone('0')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedRate.setDescription("Template for docsIetfQosParamSetMinReservedRate.")
docsIetfQosServiceClassMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedPkt.setDescription("Template for docsIetfQosParamSetMinReservedPkt.")
docsIetfQosServiceClassMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(1522)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxConcatBurst.setDescription("Template for docsIetfQosParamSetMaxConcatBurst.")
docsIetfQosServiceClassNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 9), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassNomPollInterval.setDescription("Template for docsIetfQosParamSetNomPollInterval.")
docsIetfQosServiceClassTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 10), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassTolPollJitter.setDescription("Template for docsIetfQosParamSetTolPollJitter.")
docsIetfQosServiceClassUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassUnsolicitGrantSize.setDescription("Template for docsIetfQosParamSetUnsolicitGrantSize.")
docsIetfQosServiceClassNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 12), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassNomGrantInterval.setDescription("Template for docsIetfQosParamSetNomGrantInterval.")
docsIetfQosServiceClassTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 13), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassTolGrantJitter.setDescription("Template for docsIetfQosParamSetTolGrantJitter.")
docsIetfQosServiceClassGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassGrantsPerInterval.setDescription("Template for docsIetfQosParamSetGrantsPerInterval.")
docsIetfQosServiceClassMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 15), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxLatency.setDescription("Template for docsIetfQosParamSetClassMaxLatency.")
docsIetfQosServiceClassActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassActiveTimeout.setDescription("Template for docsIetfQosParamSetActiveTimeout.")
docsIetfQosServiceClassAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassAdmittedTimeout.setDescription("Template for docsIetfQosParamSetAdmittedTimeout.")
docsIetfQosServiceClassSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 18), DocsIetfQosSchedulingType().clone('bestEffort')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassSchedulingType.setDescription("Template for docsIetfQosParamSetSchedulingType.")
docsIetfQosServiceClassRequestPolicy = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4).clone(hexValue='00000000')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassRequestPolicy.setDescription("Template for docsIetfQosParamSetRequestPolicyOct.")
docsIetfQosServiceClassTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 20), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceClassTosAndMask.setDescription("Template for docsIetfQosParamSetTosAndMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\n\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.")
docsIetfQosServiceClassTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceClassTosOrMask.setDescription("Template for docsIetfQosParamSetTosOrMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.")
docsIetfQosServiceClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 22), DocsIetfQosRfMacIfDirection().clone('upstream')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassDirection.setDescription("Specifies whether the service class template\napplies to upstream or downstream service flows.")
docsIetfQosServiceClassStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 23), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassStorageType.setDescription("This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.")
docsIetfQosServiceClassDSCPOverwrite = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 24), DscpOrAny().clone('-1')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassDSCPOverwrite.setDescription("This object allows the overwrite of the DSCP\nfield per RFC 3260.\n\nIf this object is -1, then the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'FF'H and docsIetfQosServiceClassTosOrMask MUST be\n'00'H. Otherwise, this object is in the range of\n0..63, and the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'03'H and the docsIetfQosServiceClassTosOrMask MUST\nbe this object's value shifted left by two bit\npositions.")
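# Illustrative only, not part of the generated MIB module: a sketch of the
# relationship described above between docsIetfQosServiceClassDSCPOverwrite
# and the TOS AND/OR masks, plus how those masks rewrite a packet's TOS byte.
# The helper names are ours, purely for explanation.
def _exampleDscpOverwriteMasks(dscpOverwrite):
    # -1 means "do not overwrite": AND 'FF'H / OR '00'H leaves the TOS byte as is.
    if dscpOverwrite == -1:
        return 0xFF, 0x00
    # 0..63 rewrites the six DSCP bits and keeps the two ECN bits:
    # AND '03'H, OR the value shifted left by two bit positions.
    return 0x03, (dscpOverwrite << 2) & 0xFF

def _exampleApplyTosMasks(tosByte, andMask, orMask):
    # TOS overwrite semantics: new TOS = (original TOS AND andMask) OR orMask.
    return (tosByte & andMask) | orMask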
docsIetfQosServiceClassPolicyTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 9))
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyTable.setDescription("This table describes the set of DOCSIS-QOS\nService Class Policies.\n\nThis table is an adjunct to the\n\n\n\ndocsDevFilterPolicy table. Entries in the\ndocsDevFilterPolicy table can point to\nspecific rows in this table.\n\nThis table permits mapping a packet to a service\nclass name of an active service flow so long as\na classifier does not exist at a higher\npriority.")
docsIetfQosServiceClassPolicyEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 9, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyIndex"))
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyEntry.setDescription("A service class name policy entry.")
docsIetfQosServiceClassPolicyIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyIndex.setDescription("Index value to identify an entry in\nthis table uniquely.")
docsIetfQosServiceClassPolicyName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyName.setDescription("Service Class Name to identify the name of the\nservice class flow to which the packet should be\ndirected.")
docsIetfQosServiceClassPolicyRulePriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyRulePriority.setDescription("Service Class Policy rule priority for the\nentry.")
docsIetfQosServiceClassPolicyStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStatus.setDescription("Used to create or delete rows in this table.\nThis object should not be deleted if it is\nreferenced by an entry in docsDevFilterPolicy.\nThe reference should be deleted first.\nThere is no restriction on the ability\nto change values in this row while the row is\nactive. Inactive rows need not be timed out.")
docsIetfQosServiceClassPolicyStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStorageType.setDescription("This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.")
docsIetfQosPHSTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 10))
if mibBuilder.loadTexts: docsIetfQosPHSTable.setDescription("This table describes the set of payload header\nsuppression entries.")
docsIetfQosPHSEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 10, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosPktClassId"))
if mibBuilder.loadTexts: docsIetfQosPHSEntry.setDescription("A payload header suppression entry.\n\nThe ifIndex is an ifType of docsCableMaclayer(127).\nThe index docsIetfQosServiceFlowId selects one\nservice flow from the cable MAC layer interface.\nThe docsIetfQosPktClassId index matches an\nindex of the docsIetfQosPktClassTable.")
docsIetfQosPHSField = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSField.setDescription("Payload header suppression field defines the\nbytes of the header that must be\nsuppressed/restored by the sending/receiving\ndevice.\n\nThe number of octets in this object should be\nthe same as the value of docsIetfQosPHSSize.")
docsIetfQosPHSMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSMask.setDescription("Payload header suppression mask defines the\nbit mask that is used in combination with the\ndocsIetfQosPHSField. It defines which bytes in\nthe header must be suppressed/restored by the\nsending or receiving device.\n\nEach bit of this bit mask corresponds to a byte\nin the docsIetfQosPHSField, with the least\n\n\n\nsignificant bit corresponding to the first byte\nof the docsIetfQosPHSField.\n\nEach bit of the bit mask specifies whether\nthe corresponding byte should be suppressed\nin the packet. A bit value of '1' indicates that\nthe byte should be suppressed by the sending\ndevice and restored by the receiving device.\nA bit value of '0' indicates that\nthe byte should not be suppressed by the sending\ndevice or restored by the receiving device.\n\nIf the bit mask does not contain a bit for each\nbyte in the docsIetfQosPHSField, then the bit mask\nis extended with bit values of '1' to be the\nnecessary length.")
docsIetfQosPHSSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSSize.setDescription("Payload header suppression size specifies the\nnumber of bytes in the header to be suppressed\nand restored.\n\nThe value of this object must match the number\nof bytes in the docsIetfQosPHSField.")
docsIetfQosPHSVerify = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSVerify.setDescription("Payload header suppression verification value. If\n'true', the sender must verify docsIetfQosPHSField\nis the same as what is contained in the packet\nto be suppressed.")
docsIetfQosPHSIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSIndex.setDescription("Payload header suppression index uniquely\n\n\n\nreferences the PHS rule for a given service flow.")
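# Illustrative only, not part of the generated MIB module: a sketch of how
# docsIetfQosPHSField and docsIetfQosPHSMask cooperate. Bit i of the mask
# (least-significant bit of the first octet first; later octets are assumed to
# follow in order) covers byte i of the PHS field; a '1' bit means the byte is
# suppressed by the sender and restored by the receiver, and missing mask bits
# are treated as '1'. Inputs are sequences of integer octet values.
def _examplePhsSuppressedBytes(fieldOctets, maskOctets):
    suppressed = []
    for i, octet in enumerate(fieldOctets):
        maskOctet = maskOctets[i // 8] if (i // 8) < len(maskOctets) else 0xFF
        if (maskOctet >> (i % 8)) & 1:
            suppressed.append(octet)
    return suppressed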
docsIetfQosCmtsMacToSrvFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 11))
if mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowTable.setDescription("This table provides for referencing the service\nflows associated with a particular cable modem.\nThis allows indexing into other docsIetfQos\ntables that are indexed by docsIetfQosServiceFlowId\nand ifIndex.")
docsIetfQosCmtsMacToSrvFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 11, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosCmtsCmMac"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosCmtsServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowEntry.setDescription("An entry is created by CMTS for each service flow\nconnected to this CMTS.")
docsIetfQosCmtsCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 1), MacAddress()).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosCmtsCmMac.setDescription("The MAC address for the referenced CM.")
docsIetfQosCmtsServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosCmtsServiceFlowId.setDescription("An index assigned to a service flow by CMTS.")
docsIetfQosCmtsIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 3), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosCmtsIfIndex.setDescription("The ifIndex of ifType docsCableMacLayer(127)\non the CMTS that is connected to the Cable Modem.")
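# Illustrative only, not part of the generated MIB module: how one instance of
# docsIetfQosCmtsIfIndex is addressed. The table index is the CM MAC address
# (a fixed-length 6-octet MacAddress, encoded as six sub-identifiers) followed
# by the service flow ID (one sub-identifier). The helper name and the sample
# values in the comment are ours.
def _exampleCmtsMacToSrvFlowInstance(cmMacOctets, serviceFlowId):
    # e.g. _exampleCmtsMacToSrvFlowInstance((0x00, 0x11, 0x22, 0x33, 0x44, 0x55), 10012)
    docsIetfQosCmtsIfIndexOid = (1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 3)
    return docsIetfQosCmtsIfIndexOid + tuple(cmMacOctets) + (int(serviceFlowId),)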
docsIetfQosConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2))
docsIetfQosGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 1))
docsIetfQosCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 2))
# Augmentations
# Groups
docsIetfQosBaseGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 1)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassUserPriLow"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourcePortStart"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassEnetProtocol"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetDestAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowTimeActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowTimeCreated"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassStateActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSAReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetDestMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestPortStart"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetSourceMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSDRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSVerify"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSARsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassEnetProtocolType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosLow"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetSourceAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSField"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicChangeFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSDReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestPortEnd"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicAdds"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassVlanId"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicDeleteFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicDeletes"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpProtocol"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowSID"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPHSUnknowns"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPrimary"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSSize"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourcePortEnd"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSAAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowOctets"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassUserPriHigh"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPolicedDelayPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPolicedDropPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosHigh"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourceMacAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestMacMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestMacAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassBitMap"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicAddFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetAddressType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicChanges"), ) )
if mibBuilder.loadTexts: docsIetfQosBaseGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems.")
docsIetfQosParamSetGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 2)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxConcatBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetGrantsPerInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxTrafficRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetActiveTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMinReservedPkt"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetRequestPolicyOct"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetServiceClassName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTosOrMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMinReservedRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxTrafficBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetBitMap"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetSchedulingType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTolPollJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTosAndMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxLatency"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTolGrantJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetNomPollInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetNomGrantInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetAdmittedTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetUnsolicitGrantSize"), ) )
if mibBuilder.loadTexts: docsIetfQosParamSetGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems for QOS Parameter Sets.")
docsIetfQosCmtsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 3)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogSFID"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamFragDiscards"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPolicedDropPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogControl"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeCreated"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogOctets"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamConcatBursts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogCmMac"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPrimary"), ("DOCS-IETF-QOS-MIB", "docsIetfQosCmtsIfIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamFragments"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogIfIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPolicedDelayPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogServiceClassName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeDeleted"), ) )
if mibBuilder.loadTexts: docsIetfQosCmtsGroup.setDescription("Group of objects implemented only in the CMTS.")
docsIetfQosSrvClassPolicyGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 4)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyStorageType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyRulePriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyStatus"), ) )
if mibBuilder.loadTexts: docsIetfQosSrvClassPolicyGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems when supporting policy-based\nservice flows.")
docsIetfQosServiceClassGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 5)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassSchedulingType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassNomGrantInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTolGrantJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassDSCPOverwrite"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassGrantsPerInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxTrafficBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxTrafficRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassStorageType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTolPollJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTosOrMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassStatus"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxConcatBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTosAndMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassUnsolicitGrantSize"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassNomPollInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassRequestPolicy"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMinReservedRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassActiveTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMinReservedPkt"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassAdmittedTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxLatency"), ) )
if mibBuilder.loadTexts: docsIetfQosServiceClassGroup.setDescription("Group of objects implemented only in Cable Modem\nTermination Systems when supporting expansion of Service\nClass Names in a QOS Parameter Set")
# Compliances
docsIetfQosCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 127, 2, 2, 1)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosCmtsGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosSrvClassPolicyGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosBaseGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetGroup"), ) )
if mibBuilder.loadTexts: docsIetfQosCompliance.setDescription("The compliance statement for MCNS Cable Modems and\nCable Modem Termination Systems that implement DOCSIS\nService Flows.")
# Exports
# Module identity
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", PYSNMP_MODULE_ID=docsIetfQosMIB)
# Types
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", DocsIetfQosBitRate=DocsIetfQosBitRate, DocsIetfQosRfMacIfDirection=DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=DocsIetfQosSchedulingType)
# Objects
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosMIB=docsIetfQosMIB, docsIetfQosNotifications=docsIetfQosNotifications, docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=docsIetfQosPktClassTable, docsIetfQosPktClassEntry=docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId, docsIetfQosPktClassDirection=docsIetfQosPktClassDirection, docsIetfQosPktClassPriority=docsIetfQosPktClassPriority, docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow, docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh, docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask, docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol, docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType, docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr, docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask, docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr, docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask, docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart, docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd, docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart, docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd, docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr, docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask, docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr, docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType, docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol, docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow, docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh, docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId, docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive, docsIetfQosPktClassPkts=docsIetfQosPktClassPkts, docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap, docsIetfQosParamSetTable=docsIetfQosParamSetTable, docsIetfQosParamSetEntry=docsIetfQosParamSetEntry, docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName, docsIetfQosParamSetPriority=docsIetfQosParamSetPriority, docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate, docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst, docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate, docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt, docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout, docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout, docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst, docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType, docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval, docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter, docsIetfQosParamSetUnsolicitGrantSize=docsIetfQosParamSetUnsolicitGrantSize, docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval, docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter, docsIetfQosParamSetGrantsPerInterval=docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=docsIetfQosParamSetBitMap, 
docsIetfQosServiceFlowTable=docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=docsIetfQosServiceFlowPHSUnknowns, docsIetfQosServiceFlowPolicedDropPkts=docsIetfQosServiceFlowPolicedDropPkts, docsIetfQosServiceFlowPolicedDelayPkts=docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID, docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments, docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards, docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts, docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable, docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry, docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps, docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps, docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps, docsIetfQosDynamicAdds=docsIetfQosDynamicAdds, docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails, docsIetfQosDynamicChanges=docsIetfQosDynamicChanges, docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails, docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes, docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails, docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks, docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=docsIetfQosServiceFlowLogTimeDeleted, docsIetfQosServiceFlowLogTimeCreated=docsIetfQosServiceFlowLogTimeCreated, docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive, docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection, docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary, docsIetfQosServiceFlowLogServiceClassName=docsIetfQosServiceFlowLogServiceClassName, docsIetfQosServiceFlowLogPolicedDropPkts=docsIetfQosServiceFlowLogPolicedDropPkts, docsIetfQosServiceFlowLogPolicedDelayPkts=docsIetfQosServiceFlowLogPolicedDelayPkts, docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl, 
docsIetfQosServiceClassTable=docsIetfQosServiceClassTable, docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry, docsIetfQosServiceClassName=docsIetfQosServiceClassName, docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus, docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority, docsIetfQosServiceClassMaxTrafficRate=docsIetfQosServiceClassMaxTrafficRate, docsIetfQosServiceClassMaxTrafficBurst=docsIetfQosServiceClassMaxTrafficBurst, docsIetfQosServiceClassMinReservedRate=docsIetfQosServiceClassMinReservedRate, docsIetfQosServiceClassMinReservedPkt=docsIetfQosServiceClassMinReservedPkt, docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst)
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosServiceClassNomPollInterval=docsIetfQosServiceClassNomPollInterval, docsIetfQosServiceClassTolPollJitter=docsIetfQosServiceClassTolPollJitter, docsIetfQosServiceClassUnsolicitGrantSize=docsIetfQosServiceClassUnsolicitGrantSize, docsIetfQosServiceClassNomGrantInterval=docsIetfQosServiceClassNomGrantInterval, docsIetfQosServiceClassTolGrantJitter=docsIetfQosServiceClassTolGrantJitter, docsIetfQosServiceClassGrantsPerInterval=docsIetfQosServiceClassGrantsPerInterval, docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency, docsIetfQosServiceClassActiveTimeout=docsIetfQosServiceClassActiveTimeout, docsIetfQosServiceClassAdmittedTimeout=docsIetfQosServiceClassAdmittedTimeout, docsIetfQosServiceClassSchedulingType=docsIetfQosServiceClassSchedulingType, docsIetfQosServiceClassRequestPolicy=docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask=docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=docsIetfQosServiceClassStorageType, docsIetfQosServiceClassDSCPOverwrite=docsIetfQosServiceClassDSCPOverwrite, docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable, docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry, docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex, docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName, docsIetfQosServiceClassPolicyRulePriority=docsIetfQosServiceClassPolicyRulePriority, docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus, docsIetfQosServiceClassPolicyStorageType=docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry, docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize, docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=docsIetfQosCmtsIfIndex, docsIetfQosConformance=docsIetfQosConformance, docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=docsIetfQosCompliances)
# Groups
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosBaseGroup=docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup, docsIetfQosCmtsGroup=docsIetfQosCmtsGroup, docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup, docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)
# Compliances
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosCompliance=docsIetfQosCompliance)
This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassDestPortStart.setDescription(\n \"\"\"This object specifies the low-end inclusive\nrange of TCP/UDP destination port numbers to\nwhich a packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassDestPortEnd.setDescription(\n \"\"\"This object specifies the high-end inclusive\nrange of TCP/UDP destination port numbers to which\na packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassDestMacAddr.setDescription(\n \"\"\"An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassDestMacMask.setDescription(\n \"\"\"An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassSourceMacAddr.setDescription(\n \"\"\"An Ethernet packet matches this entry when its\nsource MAC address equals the value of\nthis object.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFFFFFF'H.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassEnetProtocolType.setDescription(\n \"\"\"This object indicates the format of the layer 3\nprotocol ID in the Ethernet packet. A value of\nnone(0) means that the rule does not use the\nlayer 3 protocol type as a matching criteria.\n\nA value of ethertype(1) means that the rule\napplies only to frames that contain an\nEtherType value. 
Ethertype values are contained\nin packets using the Dec-Intel-Xerox (DIX)\nencapsulation or the RFC1042 Sub-Network Access\nProtocol (SNAP) encapsulation formats.\n\nA value of dsap(2) means that the rule applies\n\n\n\nonly to frames using the IEEE802.3\nencapsulation format with a Destination Service\nAccess Point (DSAP) other\nthan 0xAA (which is reserved for SNAP).\n\nA value of mac(3) means that the rule applies\nonly to MAC management messages for MAC management\nmessages.\n\nA value of all(4) means that the rule matches\nall Ethernet packets.\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object\napplies to the embedded EtherType field within\nthe 802.1P/Q header.\n\nIf the referenced parameter is not present in a\nclassifier, this object reports the value of 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassEnetProtocol.setDescription(\n \"\"\"If docsIetfQosEthPktClassProtocolType is none(0),\nthis object is ignored when considering whether\na packet matches the current rule.\n\nIf dosQosPktClassEnetProtocolType is ethertype(1),\nthis object gives the 16-bit value of the\nEtherType that the packet must match in order to\nmatch the rule.\n\nIf docsIetfQosPktClassEnetProtocolType is dsap(2),\nthe lower 8 bits of this object's value must match\nthe DSAP byte of the packet in order to match the\nrule.\n\nIf docsIetfQosPktClassEnetProtocolType is mac(3),\nthe lower 8 bits of this object's value represent a\nlower bound (inclusive) of MAC management message\ntype codes matched, and the upper 8 bits represent\nthe upper bound (inclusive) of matched MAC message\ntype codes. Certain message type codes are\nexcluded from matching, as specified in the\nreference.\n\n\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object applies\nto the embedded EtherType field within the 802.1P/Q\nheader.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassUserPriLow.setDescription(\n \"\"\"This object applies only to Ethernet frames\nusing the 802.1P/Q tag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassUserPriHigh.setDescription(\n \"\"\"This object applies only to Ethernet frames\nusing the 802.1P/Qtag header (indicated with\nEtherType 0x8100). 
Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\n\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 7.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassVlanId.setDescription(\n \"\"\"This object applies only to Ethernet frames\nusing the 802.1P/Q tag header.\n\nTagged packets must have a VLAN Identifier that\nmatches the value in order to match the rule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassStateActive.setDescription(\n \"\"\"This object indicates whether or not the classifier\nis enabled to classify packets to a Service Flow.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas true(1).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassPkts.setDescription(\n \"\"\"This object counts the number of packets that have\nbeen classified using this entry. This\nincludes all packets delivered to a Service Flow\nmaximum rate policing function, whether or not that\nfunction drops the packets.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPktClassBitMap.setDescription(\n \"\"\"This object indicates which parameter encodings\nwere actually present in the DOCSIS packet\nclassifier encoding signaled in the DOCSIS message\nthat created or modified the classifier. Note that\nDynamic Service Change messages have replace\nsemantics, so that all non-default parameters must\nbe present whether the classifier is being created\nor changed.\n\nA bit of this object is set to 1 if the parameter\nindicated by the comment was present in the\nclassifier encoding, and to 0 otherwise.\n\nNote that BITS are encoded most significant bit\nfirst, so that if, for example, bits 6 and 7 are\nset, this object is encoded as the octet string\n'030000'H.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTable.setDescription(\n \"\"\"This table describes the set of DOCSIS 1.1 and 2.0\nQOS parameters defined in a managed device.\n\nThe ifIndex index specifies a DOCSIS MAC Domain.\nThe docsIetfQosServiceFlowId index specifies a\nparticular Service Flow.\nThe docsIetfQosParamSetType index indicates whether\nthe active, admitted, or provisioned QOS Parameter\nSet is being described by the row.\n\nOnly the QOS Parameter Sets of DOCSIS 1.1 and 2.0\nService Flows are represented in this table.\n\nDOCSIS 1.0 QOS service profiles are not\nrepresented in this table.\n\nEach row corresponds to a DOCSIS QOS Parameter Set\nas signaled via DOCSIS MAC management messages.\nEach object in the row corresponds to one or\npart of one DOCSIS 1.1 Service Flow Encoding.\nThe docsIetfQosParamSetBitMap object in the row\nindicates which particular parameters were signaled\nin the original registration or dynamic service\nrequest message that created the QOS Parameter Set.\n\nIn many cases, even if a QOS Parameter Set parameter\nwas not signaled, the DOCSIS specification calls\nfor a default value to be used. 
That default value\nis reported as the value of the corresponding object\nin this row.\n\nMany objects are not applicable, depending on\nthe Service Flow direction or upstream scheduling\ntype. The object value reported in this case\nis specified in the DESCRIPTION clause.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetEntry.setDescription('A unique set of QOS parameters.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetServiceClassName.setDescription(\n \"\"\"Refers to the Service Class Name from which the\nparameter set values were derived.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is a zero-length string.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetPriority.setDescription(\n \"\"\"The relative priority of a Service Flow.\nHigher numbers indicate higher priority.\nThis priority should only be used to differentiate\n\n\n\nService Flow from identical parameter sets.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, the reported value is 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMaxTrafficRate.setDescription(\n \"\"\"Maximum sustained traffic rate allowed for this\nService Flow in bits/sec. Must count all MAC frame\ndata PDU from the bytes following the MAC header\nHCS to the end of the CRC. The number of bytes\nforwarded is limited during any time interval.\nThe value 0 means no maximum traffic rate is\nenforced. This object applies to both upstream and\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, it is reported as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMaxTrafficBurst.setDescription(\n \"\"\"Specifies the token bucket size in bytes\nfor this parameter set. The value is calculated\nfrom the byte following the MAC header HCS to\nthe end of the CRC. This object is applied in\nconjunction with docsIetfQosParamSetMaxTrafficRate\nto calculate maximum sustained traffic rate.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort (2), nonRealTimePollingService(3),\nand realTimePollingService(4) is 3044.\n\nIf this parameter is not applicable, it is reported\nas 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMinReservedRate.setDescription(\n \"\"\"Specifies the guaranteed minimum rate in\nbits/sec for this parameter set. The value is\ncalculated from the byte following the MAC\nheader HCS to the end of the CRC. The default\nvalue of 0 means that no bandwidth is reserved.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter\nis not applicable, it is reported as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMinReservedPkt.setDescription(\n \"\"\"Specifies an assumed minimum packet size in\nbytes for which the\ndocsIetfQosParamSetMinReservedRate will be\nprovided. 
The value is calculated from the byte\nfollowing the MAC header HCS to the end of the\nCRC.\n\nIf the referenced parameter is omitted from a\nDOCSIS QOS parameter set, the default value is\nCMTS implementation dependent. In this case, the\nCMTS reports the default value it is using, and the\nCM reports a value of 0. If the referenced\nparameter is not applicable to the direction or\nscheduling type of the Service Flow, both CMTS and\nCM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetActiveTimeout.setDescription(\n \"\"\"Specifies the maximum duration in seconds that\nresources remain unused on an active service\nflow before CMTS signals that both active and\nadmitted parameters set are null. The default\nvalue of 0 signifies an infinite amount of time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetAdmittedTimeout.setDescription(\n \"\"\"Specifies the maximum duration in seconds that\nresources remain in admitted state before\nresources must be released.\n\nThe value of 0 signifies an infinite amount\nof time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the\ndefault value of this object is 200.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMaxConcatBurst.setDescription(\n \"\"\"Specifies the maximum concatenated burst in\nbytes that an upstream Service Flow is allowed.\nThe value is calculated from the FC byte of the\nConcatenation MAC Header to the last CRC byte in\nof the last concatenated MAC frame, inclusive.\nThe value of 0 specifies no maximum burst.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort(2), nonRealTimePollingService(3), and\n\n\n\nrealTimePollingService(4) is 1522. If the parameter\nis not applicable, this object's value is reported\nas 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetSchedulingType.setDescription(\n \"\"\"Specifies the upstream scheduling service used for\nupstream Service Flow.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set of an\nupstream Service Flow, the default value of this\nobject is bestEffort(2). For QOS parameter sets of\ndownstream Service Flows, this object's value is\nreported as undefined(1).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetNomPollInterval.setDescription(\n \"\"\"Specifies the nominal interval in microseconds\nbetween successive unicast request\nopportunities on an upstream Service Flow.\n\nThis object applies only to upstream Service Flows\nwith DocsIetfQosSchedulingType of value\nnonRealTimePollingService(3),\nrealTimePollingService(4), and\nunsolictedGrantServiceWithAD(5). The parameter is\nmandatory for realTimePollingService(4). If the\nparameter is omitted with\nnonRealTimePollingService(3), the CMTS uses an\nimplementation-dependent value. If the parameter\nis omitted with unsolictedGrantServiceWithAD(5),\nthe CMTS uses as a default value the value of the\nNominal Grant Interval parameter. In all cases,\nthe CMTS reports the value it is using when the\nparameter is applicable. 
The CM reports the\nsignaled parameter value if it was signaled,\nand 0 otherwise.\n\n\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTolPollJitter.setDescription(\n \"\"\"Specifies the maximum amount of time in\nmicroseconds that the unicast request interval\nmay be delayed from the nominal periodic\nschedule on an upstream Service Flow.\n\nThis parameter is applicable only to upstream\nService Flows with a DocsIetfQosSchedulingType of\nrealTimePollingService(4) or\nunsolictedGrantServiceWithAD(5).\n\nIf the referenced parameter is applicable but not\npresent in the corresponding DOCSIS QOS Parameter\nSet, the CMTS uses an implementation-dependent\nvalue and reports the value it is using.\nThe CM reports a value of 0 in this case.\n\nIf the parameter is not applicable to the\ndirection or upstream scheduling type of the\nService Flow, both CMTS and CM report this\nobject's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetUnsolicitGrantSize.setDescription(\n \"\"\"Specifies the unsolicited grant size in bytes.\nThe grant size includes the entire MAC frame\ndata PDU from the Frame Control byte to the end\nof the MAC frame.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\n\n\n\nwhen applicable. Both CMTS and CM report\nthe signaled value of the parameter in this\ncase.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetNomGrantInterval.setDescription(\n \"\"\"Specifies the nominal interval in microseconds\nbetween successive data grant opportunities\non an upstream Service Flow.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTolGrantJitter.setDescription(\n \"\"\"Specifies the maximum amount of time in\nmicroseconds that the transmission opportunities\nmay be delayed from the nominal periodic schedule.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. 
Both CMTS and CM report the\n\n\n\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetGrantsPerInterval.setDescription(\n \"\"\"Specifies the number of data grants per Nominal\nGrant Interval\n(docsIetfQosParamSetNomGrantInterval).\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServicewithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTosAndMask.setDescription(\n \"\"\"Specifies the AND mask for the IP TOS byte for\noverwriting IP packet's TOS value. The IP packet\nTOS byte is bitwise ANDed with\ndocsIetfQosParamSetTosAndMask, and the result is\nbitwise ORed with docsIetfQosParamSetTosORMask and\nthe result is written to the IP packet TOS byte.\nA value of 'FF'H for docsIetfQosParamSetTosAndMask\nand a value of '00'H for\ndocsIetfQosParamSetTosOrMask means that the IP\nPacket TOS byte is not overwritten.\n\nThis combination is reported if the referenced\nparameter is not present in a QOS Parameter Set.\n\n\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosORMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosParamSetTosAndMask that have either of\nthe least-significant two bits set to 0. Similarly,\noperators should not use values of\ndocsIetfQosParamSetTosORMask that have either of\nthe least-significant two bits set to 1.\n\nEven though this object is only enforced by the\nCable Modem Termination System (CMTS),\nCable Modems MUST report the value as signaled in\nthe referenced parameter.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetTosOrMask.setDescription(\n \"\"\"Specifies the OR mask for the IP TOS byte.\n\nSee the description of docsIetfQosParamSetTosAndMask\nfor further details.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosORMask that would result\nin the modification of the ECN bits.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetMaxLatency.setDescription(\n \"\"\"Specifies the maximum latency between the\nreception of a packet by the CMTS on its NSI\nand the forwarding of the packet to the RF\ninterface. A value of 0 signifies no maximum\nlatency is enforced. This object only applies to\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding downstream DOCSIS QOS Parameter Set,\nthe default value is 0. 
This parameter is\nnot applicable to upstream DOCSIS QOS Parameter\nSets, and its value is reported as 0 in this case.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetType.setDescription(\n \"\"\"Defines the type of the QOS parameter set defined\nby this row. active(1) indicates the Active QOS\nparameter set, describing the service currently\nbeing provided by the DOCSIS MAC domain to the\nService Flow. admitted(2) indicates the Admitted\nQOS Parameter Set, describing services reserved by\nthe DOCSIS MAC domain for use by the service\nflow. provisioned (3) describes the QOS Parameter\nSet defined in the DOCSIS CM Configuration file for\nthe Service Flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetRequestPolicyOct.setDescription(\n \"\"\"Specifies which transmit interval opportunities\nthe CM omits for upstream transmission requests and\npacket transmissions. This object takes its\ndefault value for downstream Service Flows.\n\nUnless otherwise indicated, a bit value of 1 means\nthat a CM must not use that opportunity for\nupstream transmission.\n\nIf bit 0 is the least significant bit of the\nleast significant (4th) octet, and if bit number\nis increased with significance, the bit definitions\nare defined as follows:\n\nbroadcastReqOpp(0):\n all CMs broadcast request opportunities\n\npriorityReqMulticastReq(1):\n priority request multicast request\n opportunities\n\nreqDataForReq(2):\n request/data opportunities for requests\n\nreqDataForData(3):\n request/data opportunities for data\n\npiggybackReqWithData(4):\n piggyback requests with data\n\nconcatenateData(5):\n concatenate data\n\nfragmentData(6):\n fragment data\n\nsuppresspayloadheaders(7):\n suppress payload headers\n\n\n\n\ndropPktsExceedUGSize(8):\n A value of 1 means that the Service Flow must\n drop packets that do not fit in the Unsolicited\n Grant size.\n\nIf the referenced parameter is not present in\na QOS Parameter Set, the value of this object is\nreported as '00000000'H.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetBitMap.setDescription(\n \"\"\"This object indicates the set of QOS Parameter\nSet parameters actually signaled in the\nDOCSIS registration or dynamic service request\nmessage that created or modified the QOS Parameter\nSet. A bit is set to 1 when the parameter described\nby the indicated reference section is present\nin the original request.\n\nNote that when Service Class names are expanded,\nthe registration or dynamic response message may\ncontain parameters as expanded by the CMTS based\n\n\n\non a stored service class. These expanded\nparameters are not indicated by a 1 bit in this\nobject.\n\nNote that even though some QOS Parameter Set\nparameters may not be signaled in a message\n(so that the paramater's bit in this object is 0),\nthe DOCSIS specification requires that default\nvalues be used. These default values are reported\nas the corresponding object's value in the row.\n\nNote that BITS objects are encoded most\nsignificant bit first. For example, if bits\n1 and 16 are set, the value of this object\nis the octet string '400080'H.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowTable.setDescription(\n \"\"\"This table describes the set of DOCSIS-QOS\nService Flows in a managed device.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowEntry.setDescription(\n \"\"\"Describes a Service Flow.\nAn entry in the table exists for each\nService Flow ID. 
The ifIndex is an\nifType of docsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowId.setDescription(\n 'An index assigned to a Service Flow by CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowSID.setDescription(\n \"\"\"Service Identifier (SID) assigned to an\nadmitted or active Service Flow. This object\nreports a value of 0 if a Service ID is not\nassociated with the Service Flow. Only active\nor admitted upstream Service Flows will have a\nService ID (SID).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowDirection.setDescription(\n 'The direction of the Service Flow.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPrimary.setDescription(\n \"\"\"Object reflects whether Service Flow is the primary\nor a secondary Service Flow.\n\nA primary Service Flow is the default Service Flow\nfor otherwise unclassified traffic and all MAC\nmessages.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowStatsTable.setDescription(\n \"\"\"This table describes statistics associated with the\nService Flows in a managed device.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowStatsEntry.setDescription(\n \"\"\"Describes a set of Service Flow statistics.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPkts.setDescription(\n \"\"\"For outgoing Service Flows, this object counts the\nnumber of Packet Data PDUs forwarded to this\nService Flow. For incoming upstream CMTS service\nflows, this object counts the number of Packet\nData PDUs actually received on the Service Flow\nidentified by the SID for which the packet was\nscheduled. CMs not classifying downstream packets\nmay report this object's value as 0 for downstream\nService Flows. This object does not count\nMAC-specific management messages.\n\nParticularly for UGS flows, packets sent on the\nprimary Service Flow in violation of the UGS grant\nsize should be counted only by the instance of this\nobject that is associated with the primary service\n\n\n\nflow.\n\nUnclassified upstream user data packets (i.e., non-\nMAC-management) forwarded to the primary upstream\nService Flow should be counted by the instance of\nthis object that is associated with the primary\nservice flow.\n\nThis object does include packets counted by\ndocsIetfQosServiceFlowPolicedDelayPkts, but does not\ninclude packets counted by\ndocsIetfQosServiceFlowPolicedDropPkts\nand docsIetfQosServiceFlowPHSUnknowns.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowOctets.setDescription(\n \"\"\"The number of octets from the byte after the MAC\nheader HCS to the end of the CRC for all packets\ncounted in the docsIetfQosServiceFlowPkts object for\nthis row. 
Note that this counts the octets after\npayload header suppression and before payload\nheader expansion have been applied.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowTimeCreated.setDescription(\n \"\"\"The value of sysUpTime when the service flow\nwas created.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowTimeActive.setDescription(\n \"\"\"The number of seconds that the service flow\nhas been active.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPHSUnknowns.setDescription(\n \"\"\"For incoming upstream CMTS service flows, this\nobject counts the number of packets received\nwith an unknown payload header suppression index.\nThe service flow is identified by the SID for which\nthe packet was scheduled.\n\nOn a CM, only this object's instance for the primary\ndownstream service flow counts packets received with\nan unknown payload header suppression index. All\nother downstream service flows on CM report this\nobjects value as 0.\n\nAll outgoing service flows report this object's\nvalue as 0.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPolicedDropPkts.setDescription(\n \"\"\"For outgoing service flows, this object counts the\nnumber of Packet Data PDUs classified to this\nservice flow dropped due to:\n (1) implementation-dependent excessive delay\n while enforcing the Maximum Sustained\n Traffic Rate; or\n (2) UGS packets dropped due to exceeding the\n Unsolicited Grant Size with a\n Request/Transmission policy that requires\n such packets to be dropped.\n\nClassified packets dropped due to other reasons\n\n\n\nmust be counted in ifOutDiscards for the interface\nof this service flow. This object reports 0 for\nincoming service flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowPolicedDelayPkts.setDescription(\n \"\"\"This object counts only outgoing packets delayed in\norder to maintain the Maximum Sustained Traffic\nRate. This object will always report a value of 0\nfor UGS flows because the Maximum Sustained Traffic\nRate does not apply. This object is 0 for incoming\nservice flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamStatsTable.setDescription(\n \"\"\"This table describes statistics associated with\nupstream service flows. All counted frames must\nbe received without a Frame Check Sequence (FCS)\nerror.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamStatsEntry.setDescription(\n \"\"\"Describes a set of upstream service flow\nstatistics. 
An entry in the table exists for each\nupstream Service Flow in a managed device.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosSID.setDescription(\n \"\"\"Identifies a service ID for an admitted or active\nupstream service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamFragments.setDescription(\n \"\"\"The number of fragmentation headers received on an\nupstream service flow, regardless of whether\nthe fragment was correctly reassembled into a\nvalid packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamFragDiscards.setDescription(\n \"\"\"The number of upstream fragments discarded and not\nassembled into a valid upstream packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosUpstreamConcatBursts.setDescription(\n \"\"\"The number of concatenation headers received on an\nupstream service flow.\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicServiceStatsTable.setDescription(\n \"\"\"This table describes statistics associated with the\nDynamic Service Flows in a managed device.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicServiceStatsEntry.setDescription(\n \"\"\"Describes a set of dynamic service flow statistics.\nTwo entries exist for each DOCSIS MAC layer\ninterface for the upstream and downstream\ndirection. On the CMTS, the downstream direction\nrow indicates messages transmitted or transactions\noriginated by the CMTS. The upstream direction row\nindicates messages received or transaction\noriginated by the CM. On the CM, the downstream\ndirection row indicates messages received or\ntransactions originated by the CMTS. 
The upstream\ndirection row indicates messages transmitted by\nthe CM or transactions originated by the CM.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosIfDirection.setDescription('The direction of interface.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSAReqs.setDescription(\n \"\"\"The number of Dynamic Service Addition Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSARsps.setDescription(\n \"\"\"The number of Dynamic Service Addition Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSAAcks.setDescription(\n \"\"\"The number of Dynamic Service Addition\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSCReqs.setDescription(\n \"\"\"The number of Dynamic Service Change Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSCRsps.setDescription(\n \"\"\"The number of Dynamic Service Change Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSCAcks.setDescription(\n \"\"\"The number of Dynamic Service Change\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSDReqs.setDescription(\n \"\"\"The number of Dynamic Service Delete Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDSDRsps.setDescription(\n \"\"\"The number of Dynamic Service Delete Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicAdds.setDescription(\n \"\"\"The number of successful Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicAddFails.setDescription(\n \"\"\"The number of failed Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicChanges.setDescription(\n \"\"\"The number of successful Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicChangeFails.setDescription(\n \"\"\"The number of failed 
Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicDeletes.setDescription(\n \"\"\"The number of successful Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDynamicDeleteFails.setDescription(\n \"\"\"The number of failed Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\n\n\n\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCReqs.setDescription(\n \"\"\"The number of Dynamic Channel Change Request\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex\nthat indexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCRsps.setDescription(\n \"\"\"The number of Dynamic Channel Change Response\nmessages traversing an interface. This count is\nnonzero only on upstream direction rows. This count\nshould include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCAcks.setDescription(\n \"\"\"The number of Dynamic Channel Change Acknowledgement\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCs.setDescription(\n \"\"\"The number of successful Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosDCCFails.setDescription(\n \"\"\"The number of failed Dynamic Channel Change\ntransactions. 
This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTable.setDescription(\n \"\"\"This table contains a log of the disconnected\nService Flows in a managed device.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogEntry.setDescription(\n \"\"\"The information regarding a single disconnected\nservice flow.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogIndex.setDescription(\n 'Unique index for a logged service flow.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogIfIndex.setDescription(\n \"\"\"The ifIndex of ifType docsCableMaclayer(127)\non the CMTS where the service flow was present.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogSFID.setDescription(\n 'The index assigned to the service flow by the CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogCmMac.setDescription(\n \"\"\"The MAC address for the cable modem associated with\nthe service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPkts.setDescription(\n \"\"\"The number of packets counted on this service flow\nafter payload header suppression.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogOctets.setDescription(\n \"\"\"The number of octets counted on this service flow\nafter payload header suppression.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTimeDeleted.setDescription(\n \"\"\"The value of sysUpTime when the service flow\nwas deleted.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTimeCreated.setDescription(\n \"\"\"The value of sysUpTime when the service flow\nwas created.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogTimeActive.setDescription(\n 'The total time that the service flow was active.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogDirection.setDescription(\n \"\"\"The value of docsIetfQosServiceFlowDirection\nfor the service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPrimary.setDescription(\n \"\"\"The value of docsIetfQosServiceFlowPrimary for the\nservice flow.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogServiceClassName.setDescription(\n \"\"\"The value of docsIetfQosParamSetServiceClassName for\nthe provisioned QOS Parameter Set of the\nservice flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPolicedDropPkts.setDescription(\n \"\"\"The final value of\ndocsIetfQosServiceFlowPolicedDropPkts for the\nservice flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription(\n \"\"\"The final value of\ndocsIetfQosServiceFlowPolicedDelayPkts for the\nservice flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceFlowLogControl.setDescription(\n \"\"\"Setting this object to the value destroy(6) removes\nthis entry from the table.\n\nReading this object returns the value active(1).\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTable.setDescription(\n \"\"\"This table describes the set of DOCSIS-QOS\nService Classes in a CMTS.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n 
docsIetfQosServiceClassEntry.setDescription(\n \"\"\"A provisioned service class on a CMTS.\nEach entry defines a template for certain\nDOCSIS QOS Parameter Set values. When a CM\ncreates or modifies an Admitted QOS Parameter Set\nfor a Service Flow, it may reference a Service Class\nName instead of providing explicit QOS Parameter\nSet values. In this case, the CMTS populates\nthe QOS Parameter Set with the applicable\ncorresponding values from the named Service Class.\nSubsequent changes to a Service Class row do not\naffect the QOS Parameter Set values of any service\nflows already admitted.\n\nA service class template applies to only\na single direction, as indicated in the\ndocsIetfQosServiceClassDirection object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassName.setDescription(\n \"\"\"Service Class Name. DOCSIS specifies that the\nmaximum size is 16 ASCII characters including\na terminating zero. The terminating zero is not\nrepresented in this SnmpAdminString syntax object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassStatus.setDescription(\n \"\"\"Used to create or delete rows in this table.\nThere is no restriction on the ability to change\nvalues in this row while the row is active.\nInactive rows need not be timed out.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPriority.setDescription(\n 'Template for docsIetfQosParamSetPriority.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassMaxTrafficRate.setDescription(\n 'Template for docsIetfQosParamSetMaxTrafficRate.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassMaxTrafficBurst.setDescription(\n 'Template for docsIetfQosParamSetMaxTrafficBurst.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassMinReservedRate.setDescription(\n 'Template for docsIetfQosParamSEtMinReservedRate.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassMinReservedPkt.setDescription(\n 'Template for docsIetfQosParamSetMinReservedPkt.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassMaxConcatBurst.setDescription(\n 'Template for docsIetfQosParamSetMaxConcatBurst.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassNomPollInterval.setDescription(\n 'Template for docsIetfQosParamSetNomPollInterval.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTolPollJitter.setDescription(\n 'Template for docsIetfQosParamSetTolPollJitter.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassUnsolicitGrantSize.setDescription(\n 'Template for docsIetfQosParamSetUnsolicitGrantSize.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassNomGrantInterval.setDescription(\n 'Template for docsIetfQosParamSetNomGrantInterval.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTolGrantJitter.setDescription(\n 'Template for docsIetfQosParamSetTolGrantJitter.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassGrantsPerInterval.setDescription(\n 'Template for docsIetfQosParamSetGrantsPerInterval.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassMaxLatency.setDescription(\n 'Template for docsIetfQosParamSetClassMaxLatency.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassActiveTimeout.setDescription(\n 'Template for docsIetfQosParamSetActiveTimeout.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassAdmittedTimeout.setDescription(\n 'Template for 
docsIetfQosParamSetAdmittedTimeout.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassSchedulingType.setDescription(\n 'Template for docsIetfQosParamSetSchedulingType.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassRequestPolicy.setDescription(\n 'Template for docsIetfQosParamSetRequestPolicyOct.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTosAndMask.setDescription(\n \"\"\"Template for docsIetfQosParamSetTosAndMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\n\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly,operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassTosOrMask.setDescription(\n \"\"\"Template for docsIetfQosParamSetTosOrMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassDirection.setDescription(\n \"\"\"Specifies whether the service class template\napplies to upstream or downstream service flows.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassStorageType.setDescription(\n \"\"\"This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassDSCPOverwrite.setDescription(\n \"\"\"This object allows the overwrite of the DSCP\nfield per RFC 3260.\n\nIf this object is -1, then the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'FF'H and docsIetfQosServiceClassTosOrMask MUST be\n'00'H. Otherwise, this object is in the range of\n0..63, and the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'03'H and the docsIetfQosServiceClassTosOrMask MUST\nbe this object's value shifted left by two bit\npositions.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyTable.setDescription(\n \"\"\"This table describes the set of DOCSIS-QOS\nService Class Policies.\n\nThis table is an adjunct to the\n\n\n\ndocsDevFilterPolicy table. 
Entries in the\ndocsDevFilterPolicy table can point to\nspecific rows in this table.\n\nThis table permits mapping a packet to a service\nclass name of an active service flow so long as\na classifier does not exist at a higher\npriority.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyEntry.setDescription(\n 'A service class name policy entry.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyIndex.setDescription(\n \"\"\"Index value to identify an entry in\nthis table uniquely.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyName.setDescription(\n \"\"\"Service Class Name to identify the name of the\nservice class flow to which the packet should be\ndirected.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyRulePriority.setDescription(\n \"\"\"Service Class Policy rule priority for the\nentry.\"\"\")\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyStatus.setDescription(\n \"\"\"Used to create or delete rows in this table.\nThis object should not be deleted if it is\nreferenced by an entry in docsDevFilterPolicy.\nThe reference should be deleted first.\nThere is no restriction on the ability\nto change values in this row while the row is\nactive. Inactive rows need not be timed out.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassPolicyStorageType.setDescription(\n \"\"\"This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSTable.setDescription(\n \"\"\"This table describes the set of payload header\nsuppression entries.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSEntry.setDescription(\n \"\"\"A payload header suppression entry.\n\nThe ifIndex is an ifType of docsCableMaclayer(127).\nThe index docsIetfQosServiceFlowId selects one\nservice flow from the cable MAC layer interface.\nThe docsIetfQosPktClassId index matches an\nindex of the docsIetfQosPktClassTable.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSField.setDescription(\n \"\"\"Payload header suppression field defines the\nbytes of the header that must be\nsuppressed/restored by the sending/receiving\ndevice.\n\nThe number of octets in this object should be\nthe same as the value of docsIetfQosPHSSize.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSMask.setDescription(\n \"\"\"Payload header suppression mask defines the\nbit mask that is used in combination with the\ndocsIetfQosPHSField. It defines which bytes in\nthe header must be suppressed/restored by the\nsending or receiving device.\n\nEach bit of this bit mask corresponds to a byte\nin the docsIetfQosPHSField, with the least\n\n\n\nsignificant bit corresponding to the first byte\nof the docsIetfQosPHSField.\n\nEach bit of the bit mask specifies whether\nthe corresponding byte should be suppressed\nin the packet. 
A bit value of '1' indicates that\nthe byte should be suppressed by the sending\ndevice and restored by the receiving device.\nA bit value of '0' indicates that\nthe byte should not be suppressed by the sending\ndevice or restored by the receiving device.\n\nIf the bit mask does not contain a bit for each\nbyte in the docsIetfQosPHSField, then the bit mask\nis extended with bit values of '1' to be the\nnecessary length.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSSize.setDescription(\n \"\"\"Payload header suppression size specifies the\nnumber of bytes in the header to be suppressed\nand restored.\n\nThe value of this object must match the number\nof bytes in the docsIetfQosPHSField.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSVerify.setDescription(\n \"\"\"Payload header suppression verification value. If\n'true', the sender must verify docsIetfQosPHSField\nis the same as what is contained in the packet\nto be suppressed.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosPHSIndex.setDescription(\n \"\"\"Payload header suppression index uniquely\n\n\n\nreferences the PHS rule for a given service flow.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsMacToSrvFlowTable.setDescription(\n \"\"\"This table provides for referencing the service\nflows associated with a particular cable modem.\nThis allows indexing into other docsIetfQos\ntables that are indexed by docsIetfQosServiceFlowId\nand ifIndex.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsMacToSrvFlowEntry.setDescription(\n \"\"\"An entry is created by CMTS for each service flow\nconnected to this CMTS.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsCmMac.setDescription(\n 'The MAC address for the referenced CM.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsServiceFlowId.setDescription(\n 'An index assigned to a service flow by CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsIfIndex.setDescription(\n \"\"\"The ifIndex of ifType docsCableMacLayer(127)\non the CMTS that is connected to the Cable Modem.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosBaseGroup.setDescription(\n \"\"\"Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosParamSetGroup.setDescription(\n \"\"\"Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems for QOS Parameter Sets.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCmtsGroup.setDescription(\n 'Group of objects implemented only in the CMTS.')\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosSrvClassPolicyGroup.setDescription(\n \"\"\"Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems when supporting policy-based\nservice flows.\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosServiceClassGroup.setDescription(\n \"\"\"Group of objects implemented only in Cable Modem\nTermination Systems when supporting expansion of Service\nClass Names in a QOS Parameter Set\"\"\"\n )\n<mask token>\nif mibBuilder.loadTexts:\n docsIetfQosCompliance.setDescription(\n \"\"\"The compliance statement for MCNS Cable Modems and\nCable Modem Termination Systems that implement DOCSIS\nService Flows.\"\"\"\n )\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', PYSNMP_MODULE_ID=docsIetfQosMIB)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', DocsIetfQosBitRate=\n DocsIetfQosBitRate, 
DocsIetfQosRfMacIfDirection=\n DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=\n DocsIetfQosSchedulingType)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosMIB=docsIetfQosMIB,\n docsIetfQosNotifications=docsIetfQosNotifications,\n docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=\n docsIetfQosPktClassTable, docsIetfQosPktClassEntry=\n docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId,\n docsIetfQosPktClassDirection=docsIetfQosPktClassDirection,\n docsIetfQosPktClassPriority=docsIetfQosPktClassPriority,\n docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow,\n docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh,\n docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask,\n docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol,\n docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType,\n docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr,\n docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask,\n docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr,\n docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask,\n docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart,\n docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd,\n docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart,\n docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd,\n docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr,\n docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask,\n docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr,\n docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType,\n docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol,\n docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow,\n docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh,\n docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId,\n docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive,\n docsIetfQosPktClassPkts=docsIetfQosPktClassPkts,\n docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap,\n docsIetfQosParamSetTable=docsIetfQosParamSetTable,\n docsIetfQosParamSetEntry=docsIetfQosParamSetEntry,\n docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName,\n docsIetfQosParamSetPriority=docsIetfQosParamSetPriority,\n docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate,\n docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst,\n docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate,\n docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt,\n docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout,\n docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout,\n docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst,\n docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType,\n docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval,\n docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter,\n docsIetfQosParamSetUnsolicitGrantSize=\n docsIetfQosParamSetUnsolicitGrantSize,\n docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval,\n docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter,\n docsIetfQosParamSetGrantsPerInterval=\n docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=\n docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=\n docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=\n 
docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=\n docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=\n docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=\n docsIetfQosParamSetBitMap, docsIetfQosServiceFlowTable=\n docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=\n docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=\n docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=\n docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=\n docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=\n docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=\n docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=\n docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=\n docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=\n docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=\n docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=\n docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=\n docsIetfQosServiceFlowPHSUnknowns,\n docsIetfQosServiceFlowPolicedDropPkts=\n docsIetfQosServiceFlowPolicedDropPkts,\n docsIetfQosServiceFlowPolicedDelayPkts=\n docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=\n docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=\n docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID,\n docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments,\n docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards,\n docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts,\n docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable,\n docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry,\n docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=\n docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps,\n docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=\n docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps,\n docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=\n docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps,\n docsIetfQosDynamicAdds=docsIetfQosDynamicAdds,\n docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails,\n docsIetfQosDynamicChanges=docsIetfQosDynamicChanges,\n docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails,\n docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes,\n docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails,\n docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=\n docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks,\n docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=\n docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=\n docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=\n docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=\n docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=\n docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=\n docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=\n docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=\n docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=\n docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=\n docsIetfQosServiceFlowLogTimeDeleted,\n docsIetfQosServiceFlowLogTimeCreated=\n docsIetfQosServiceFlowLogTimeCreated,\n docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive,\n docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection,\n docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary,\n 
docsIetfQosServiceFlowLogServiceClassName=\n docsIetfQosServiceFlowLogServiceClassName,\n docsIetfQosServiceFlowLogPolicedDropPkts=\n docsIetfQosServiceFlowLogPolicedDropPkts,\n docsIetfQosServiceFlowLogPolicedDelayPkts=\n docsIetfQosServiceFlowLogPolicedDelayPkts,\n docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl,\n docsIetfQosServiceClassTable=docsIetfQosServiceClassTable,\n docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry,\n docsIetfQosServiceClassName=docsIetfQosServiceClassName,\n docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus,\n docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority,\n docsIetfQosServiceClassMaxTrafficRate=\n docsIetfQosServiceClassMaxTrafficRate,\n docsIetfQosServiceClassMaxTrafficBurst=\n docsIetfQosServiceClassMaxTrafficBurst,\n docsIetfQosServiceClassMinReservedRate=\n docsIetfQosServiceClassMinReservedRate,\n docsIetfQosServiceClassMinReservedPkt=\n docsIetfQosServiceClassMinReservedPkt,\n docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst\n )\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB',\n docsIetfQosServiceClassNomPollInterval=\n docsIetfQosServiceClassNomPollInterval,\n docsIetfQosServiceClassTolPollJitter=\n docsIetfQosServiceClassTolPollJitter,\n docsIetfQosServiceClassUnsolicitGrantSize=\n docsIetfQosServiceClassUnsolicitGrantSize,\n docsIetfQosServiceClassNomGrantInterval=\n docsIetfQosServiceClassNomGrantInterval,\n docsIetfQosServiceClassTolGrantJitter=\n docsIetfQosServiceClassTolGrantJitter,\n docsIetfQosServiceClassGrantsPerInterval=\n docsIetfQosServiceClassGrantsPerInterval,\n docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency,\n docsIetfQosServiceClassActiveTimeout=\n docsIetfQosServiceClassActiveTimeout,\n docsIetfQosServiceClassAdmittedTimeout=\n docsIetfQosServiceClassAdmittedTimeout,\n docsIetfQosServiceClassSchedulingType=\n docsIetfQosServiceClassSchedulingType,\n docsIetfQosServiceClassRequestPolicy=\n docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask\n =docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=\n docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=\n docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=\n docsIetfQosServiceClassStorageType,\n docsIetfQosServiceClassDSCPOverwrite=\n docsIetfQosServiceClassDSCPOverwrite,\n docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable,\n docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry,\n docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex,\n docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName,\n docsIetfQosServiceClassPolicyRulePriority=\n docsIetfQosServiceClassPolicyRulePriority,\n docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus,\n docsIetfQosServiceClassPolicyStorageType=\n docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=\n docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry,\n docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=\n docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize,\n docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=\n docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=\n docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=\n docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=\n docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=\n docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=\n docsIetfQosCmtsIfIndex, 
docsIetfQosConformance=docsIetfQosConformance,\n docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=\n docsIetfQosCompliances)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosBaseGroup=\n docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup,\n docsIetfQosCmtsGroup=docsIetfQosCmtsGroup,\n docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup,\n docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)\nmibBuilder.exportSymbols('DOCS-IETF-QOS-MIB', docsIetfQosCompliance=\n docsIetfQosCompliance)\n",
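A minimal usage sketch (not part of the dataset records above): assuming the autogenerated DOCS-IETF-QOS-MIB module reproduced in these entries has been compiled for pysnmp and placed in a local directory, its exported symbols can be loaded and resolved roughly as follows. The directory path and the chosen symbol are illustrative assumptions, not values taken from the dataset.

from pysnmp.smi import builder

# Build a MIB builder and point it at a hypothetical directory that holds the
# compiled DOCS-IETF-QOS-MIB.py module (path is a placeholder assumption).
mibBuilder = builder.MibBuilder()
mibBuilder.addMibSources(builder.DirMibSource('/path/to/compiled/mibs'))
mibBuilder.loadModules('DOCS-IETF-QOS-MIB')

# Resolve one of the exported table columns and inspect its OID.
(docsIetfQosServiceFlowPkts,) = mibBuilder.importSymbols(
    'DOCS-IETF-QOS-MIB', 'docsIetfQosServiceFlowPkts')
print(docsIetfQosServiceFlowPkts.getName())  # -> (1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 1)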
"step-5": "# PySNMP SMI module. Autogenerated from smidump -f python DOCS-IETF-QOS-MIB\n# by libsmi2pysnmp-0.1.3 at Thu May 22 11:57:36 2014,\n# Python version sys.version_info(major=2, minor=7, micro=2, releaselevel='final', serial=0)\n\n# Imports\n\n( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols(\"ASN1\", \"Integer\", \"ObjectIdentifier\", \"OctetString\")\n( NamedValues, ) = mibBuilder.importSymbols(\"ASN1-ENUMERATION\", \"NamedValues\")\n( ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ) = mibBuilder.importSymbols(\"ASN1-REFINEMENT\", \"ConstraintsIntersection\", \"ConstraintsUnion\", \"SingleValueConstraint\", \"ValueRangeConstraint\", \"ValueSizeConstraint\")\n( DscpOrAny, ) = mibBuilder.importSymbols(\"DIFFSERV-DSCP-TC\", \"DscpOrAny\")\n( InterfaceIndex, ifIndex, ) = mibBuilder.importSymbols(\"IF-MIB\", \"InterfaceIndex\", \"ifIndex\")\n( InetAddress, InetAddressType, InetPortNumber, ) = mibBuilder.importSymbols(\"INET-ADDRESS-MIB\", \"InetAddress\", \"InetAddressType\", \"InetPortNumber\")\n( SnmpAdminString, ) = mibBuilder.importSymbols(\"SNMP-FRAMEWORK-MIB\", \"SnmpAdminString\")\n( ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols(\"SNMPv2-CONF\", \"ModuleCompliance\", \"ObjectGroup\")\n( Bits, Counter32, Counter64, Integer32, Integer32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, mib_2, ) = mibBuilder.importSymbols(\"SNMPv2-SMI\", \"Bits\", \"Counter32\", \"Counter64\", \"Integer32\", \"Integer32\", \"ModuleIdentity\", \"MibIdentifier\", \"MibScalar\", \"MibTable\", \"MibTableRow\", \"MibTableColumn\", \"TimeTicks\", \"Unsigned32\", \"mib-2\")\n( MacAddress, RowStatus, StorageType, TextualConvention, TimeStamp, TruthValue, ) = mibBuilder.importSymbols(\"SNMPv2-TC\", \"MacAddress\", \"RowStatus\", \"StorageType\", \"TextualConvention\", \"TimeStamp\", \"TruthValue\")\n\n# Types\n\nclass DocsIetfQosBitRate(TextualConvention, Unsigned32):\n displayHint = \"d\"\n \nclass DocsIetfQosRfMacIfDirection(Integer):\n subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(2,1,)\n namedValues = NamedValues((\"downstream\", 1), (\"upstream\", 2), )\n \nclass DocsIetfQosSchedulingType(Integer):\n subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(3,1,5,6,2,4,)\n namedValues = NamedValues((\"undefined\", 1), (\"bestEffort\", 2), (\"nonRealTimePollingService\", 3), (\"realTimePollingService\", 4), (\"unsolictedGrantServiceWithAD\", 5), (\"unsolictedGrantService\", 6), )\n \n\n# Objects\n\ndocsIetfQosMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 127)).setRevisions((\"2006-01-23 00:00\",))\nif mibBuilder.loadTexts: docsIetfQosMIB.setOrganization(\"IETF IP over Cable Data Network (IPCDN)\\nWorking Group\")\nif mibBuilder.loadTexts: docsIetfQosMIB.setContactInfo(\"\\nCo-Author: Michael Patrick\\nPostal: Motorola BCS\\n 111 Locke Drive\\n Marlborough, MA 01752-7214\\n U.S.A.\\nPhone: +1 508 786 7563\\nE-mail: [email protected]\\n\\nCo-Author: William Murwin\\nPostal: Motorola BCS\\n 111 Locke Drive\\n Marlborough, MA 01752-7214\\n U.S.A.\\nPhone: +1 508 786 7594\\nE-mail: [email protected]\\n\\nIETF IPCDN Working Group\\nGeneral Discussion: [email protected]\\nSubscribe: http://www.ietf.org/mailman/listinfo/ipcdn\\nArchive: ftp://ftp.ietf.org/ietf-mail-archive/ipcdn\\nCo-chairs: Richard Woundy, [email protected]\\n Jean-Francois Mule, [email protected]\")\nif mibBuilder.loadTexts: docsIetfQosMIB.setDescription(\"This is the management 
information for\\nQuality Of Service (QOS) for DOCSIS 1.1 and 2.0.\\n\\n\\n\\nCopyright (C) The Internet Society (2006). This version of\\nthis MIB module is part of RFC 4323; see the RFC itself for\\nfull legal notices.\")\ndocsIetfQosNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 0))\ndocsIetfQosMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 1))\ndocsIetfQosPktClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 1))\nif mibBuilder.loadTexts: docsIetfQosPktClassTable.setDescription(\"This table describes the packet classification\\nconfigured on the CM or CMTS.\\nThe model is that a packet either received\\nas input from an interface or transmitted\\nfor output on an interface may be compared\\nagainst an ordered list of rules pertaining to\\nthe packet contents. Each rule is a row of this\\ntable. A matching rule provides a Service Flow\\nID to which the packet is classified.\\nAll rules need to match for a packet to match\\na classifier.\\n\\nThe objects in this row correspond to a set of\\nClassifier Encoding parameters in a DOCSIS\\nMAC management message. The\\ndocsIetfQosPktClassBitMap indicates which\\nparticular parameters were present in the\\nclassifier as signaled in the DOCSIS message.\\nIf the referenced parameter was not present\\nin the signaled DOCSIS 1.1 and 2.0 Classifier, the\\ncorresponding object in this row reports a\\nvalue as specified in the DESCRIPTION section.\")\ndocsIetfQosPktClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 1, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassId\"))\nif mibBuilder.loadTexts: docsIetfQosPktClassEntry.setDescription(\"An entry in this table provides a single packet\\nclassifier rule. 
The index ifIndex is an ifType\\nof docsCableMaclayer(127).\")\ndocsIetfQosPktClassId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosPktClassId.setDescription(\"Index assigned to packet classifier entry by\\nthe CMTS, which is unique per Service Flow.\")\ndocsIetfQosPktClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 2), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDirection.setDescription(\"Indicates the direction to which the classifier\\nis applied.\")\ndocsIetfQosPktClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassPriority.setDescription(\"The value specifies the order of evaluation\\nof the classifiers.\\n\\nThe higher the value, the higher the priority.\\nThe value of 0 is used as default in\\nprovisioned Service Flows Classifiers.\\nThe default value of 64 is used for dynamic\\nService Flow Classifiers.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the default\\nvalue as defined above.\")\ndocsIetfQosPktClassIpTosLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpTosLow.setDescription(\"The low value of a range of TOS byte values.\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\\n\\nThe IP TOS octet, as originally defined in RFC 791,\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). This object is defined as an 8-bit\\noctet as per the DOCSIS Specification\\nfor packet classification.\")\ndocsIetfQosPktClassIpTosHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpTosHigh.setDescription(\"The 8-bit high value of a range of TOS byte\\nvalues.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the\\nvalue of 0.\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). This object is defined as an 8-bit\\noctet as defined by the DOCSIS Specification\\nfor packet classification.\")\ndocsIetfQosPktClassIpTosMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpTosMask.setDescription(\"The mask value is bitwise ANDed with TOS byte\\nin an IP packet, and this value is used for\\nrange checking of TosLow and TosHigh.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). 
This object is defined as an 8-bit\\noctet per the DOCSIS Specification for packet\\nclassification.\")\ndocsIetfQosPktClassIpProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 258))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassIpProtocol.setDescription(\"This object indicates the value of the IP\\nProtocol field required for IP packets to match\\nthis rule.\\n\\n\\n\\n\\nThe value 256 matches traffic with any IP Protocol\\nvalue. The value 257 by convention matches both TCP\\nand UDP.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 258.\")\ndocsIetfQosPktClassInetAddressType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 8), InetAddressType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetAddressType.setDescription(\"The type of the Internet address for\\ndocsIetfQosPktClassInetSourceAddr,\\ndocsIetfQosPktClassInetSourceMask,\\ndocsIetfQosPktClassInetDestAddr, and\\ndocsIetfQosPktClassInetDestMask.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\nipv4(1).\")\ndocsIetfQosPktClassInetSourceAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 9), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetSourceAddr.setDescription(\"This object specifies the value of the IP\\nSource Address required for packets to match\\nthis rule.\\n\\nAn IP packet matches the rule when the packet\\nIP Source Address bitwise ANDed with the\\ndocsIetfQosPktClassInetSourceMask value equals the\\ndocsIetfQosPktClassInetSourceAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'00000000'H.\")\ndocsIetfQosPktClassInetSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 10), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetSourceMask.setDescription(\"This object specifies which bits of a packet's\\nIP Source Address are compared to match\\nthis rule.\\n\\nAn IP packet matches the rule when the packet\\nsource address bitwise ANDed with the\\ndocsIetfQosPktClassInetSourceMask value equals the\\ndocsIetfQosIpPktClassInetSourceAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'FFFFFFFF'H.\")\ndocsIetfQosPktClassInetDestAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 11), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetDestAddr.setDescription(\"This object specifies the value of the IP\\nDestination Address required for packets to match\\nthis rule.\\n\\nAn IP packet matches the rule when the packet\\nIP Destination Address bitwise ANDed with the\\ndocsIetfQosPktClassInetDestMask value\\nequals the docsIetfQosPktClassInetDestAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'00000000'H.\")\ndocsIetfQosPktClassInetDestMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 12), InetAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassInetDestMask.setDescription(\"This 
object specifies which bits of a packet's\\nIP Destination Address are compared to\\nmatch this rule.\\n\\nAn IP packet matches the rule when the packet\\ndestination address bitwise ANDed with the\\ndocsIetfQosPktClassInetDestMask value equals the\\ndocsIetfQosIpPktClassInetDestAddr value.\\n\\nThe address type of this object is specified by\\ndocsIetfQosPktClassInetAddressType.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'FFFFFFFF'H.\")\ndocsIetfQosPktClassSourcePortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 13), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassSourcePortStart.setDescription(\"This object specifies the low-end inclusive\\nrange of TCP/UDP source port numbers to which\\na packet is compared. This object is irrelevant\\nfor non-TCP/UDP IP packets.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\")\ndocsIetfQosPktClassSourcePortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 14), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassSourcePortEnd.setDescription(\"This object specifies the high-end inclusive\\nrange of TCP/UDP source port numbers to which\\na packet is compared. This object is irrelevant\\nfor non-TCP/UDP IP packets.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n65535.\")\ndocsIetfQosPktClassDestPortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 15), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestPortStart.setDescription(\"This object specifies the low-end inclusive\\nrange of TCP/UDP destination port numbers to\\nwhich a packet is compared.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value\\nof 0.\")\ndocsIetfQosPktClassDestPortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 16), InetPortNumber()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestPortEnd.setDescription(\"This object specifies the high-end inclusive\\nrange of TCP/UDP destination port numbers to which\\na packet is compared.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n65535.\")\ndocsIetfQosPktClassDestMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 17), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestMacAddr.setDescription(\"An Ethernet packet matches an entry when its\\ndestination MAC address bitwise ANDed with\\ndocsIetfQosPktClassDestMacMask equals the value of\\ndocsIetfQosPktClassDestMacAddr.\\n\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'000000000000'H.\")\ndocsIetfQosPktClassDestMacMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 18), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassDestMacMask.setDescription(\"An Ethernet packet matches an entry when its\\ndestination MAC address bitwise ANDed with\\ndocsIetfQosPktClassDestMacMask equals the value of\\ndocsIetfQosPktClassDestMacAddr.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'000000000000'H.\")\ndocsIetfQosPktClassSourceMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 19), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
docsIetfQosPktClassSourceMacAddr.setDescription(\"An Ethernet packet matches this entry when its\\nsource MAC address equals the value of\\nthis object.\\n\\nIf the referenced parameter is not present\\nin a classifier, this object reports the value of\\n'FFFFFFFFFFFF'H.\")\ndocsIetfQosPktClassEnetProtocolType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(2,0,1,4,3,)).subtype(namedValues=NamedValues((\"none\", 0), (\"ethertype\", 1), (\"dsap\", 2), (\"mac\", 3), (\"all\", 4), ))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocolType.setDescription(\"This object indicates the format of the layer 3\\nprotocol ID in the Ethernet packet. A value of\\nnone(0) means that the rule does not use the\\nlayer 3 protocol type as a matching criteria.\\n\\nA value of ethertype(1) means that the rule\\napplies only to frames that contain an\\nEtherType value. Ethertype values are contained\\nin packets using the Dec-Intel-Xerox (DIX)\\nencapsulation or the RFC1042 Sub-Network Access\\nProtocol (SNAP) encapsulation formats.\\n\\nA value of dsap(2) means that the rule applies\\n\\n\\n\\nonly to frames using the IEEE802.3\\nencapsulation format with a Destination Service\\nAccess Point (DSAP) other\\nthan 0xAA (which is reserved for SNAP).\\n\\nA value of mac(3) means that the rule applies\\nonly to MAC management messages for MAC management\\nmessages.\\n\\nA value of all(4) means that the rule matches\\nall Ethernet packets.\\n\\nIf the Ethernet frame contains an 802.1P/Q Tag\\nheader (i.e., EtherType 0x8100), this object\\napplies to the embedded EtherType field within\\nthe 802.1P/Q header.\\n\\nIf the referenced parameter is not present in a\\nclassifier, this object reports the value of 0.\")\ndocsIetfQosPktClassEnetProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocol.setDescription(\"If docsIetfQosEthPktClassProtocolType is none(0),\\nthis object is ignored when considering whether\\na packet matches the current rule.\\n\\nIf dosQosPktClassEnetProtocolType is ethertype(1),\\nthis object gives the 16-bit value of the\\nEtherType that the packet must match in order to\\nmatch the rule.\\n\\nIf docsIetfQosPktClassEnetProtocolType is dsap(2),\\nthe lower 8 bits of this object's value must match\\nthe DSAP byte of the packet in order to match the\\nrule.\\n\\nIf docsIetfQosPktClassEnetProtocolType is mac(3),\\nthe lower 8 bits of this object's value represent a\\nlower bound (inclusive) of MAC management message\\ntype codes matched, and the upper 8 bits represent\\nthe upper bound (inclusive) of matched MAC message\\ntype codes. 
Certain message type codes are\\nexcluded from matching, as specified in the\\nreference.\\n\\n\\n\\nIf the Ethernet frame contains an 802.1P/Q Tag\\nheader (i.e., EtherType 0x8100), this object applies\\nto the embedded EtherType field within the 802.1P/Q\\nheader.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 0.\")\ndocsIetfQosPktClassUserPriLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassUserPriLow.setDescription(\"This object applies only to Ethernet frames\\nusing the 802.1P/Q tag header (indicated with\\nEtherType 0x8100). Such frames include a 16-bit\\nTag that contains a 3-bit Priority field and\\na 12-bit VLAN number.\\n\\nTagged Ethernet packets must have a 3-bit\\nPriority field within the range of\\ndocsIetfQosPktClassPriLow to\\ndocsIetfQosPktClassPriHigh in order to match this\\nrule.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 0.\")\ndocsIetfQosPktClassUserPriHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassUserPriHigh.setDescription(\"This object applies only to Ethernet frames\\nusing the 802.1P/Qtag header (indicated with\\nEtherType 0x8100). Such frames include a 16-bit\\nTag that contains a 3-bit Priority field and\\na 12-bit VLAN number.\\n\\nTagged Ethernet packets must have a 3-bit\\nPriority field within the range of\\ndocsIetfQosPktClassPriLow to\\ndocsIetfQosPktClassPriHigh in order to match this\\nrule.\\n\\n\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 7.\")\ndocsIetfQosPktClassVlanId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassVlanId.setDescription(\"This object applies only to Ethernet frames\\nusing the 802.1P/Q tag header.\\n\\nTagged packets must have a VLAN Identifier that\\nmatches the value in order to match the rule.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas 0.\")\ndocsIetfQosPktClassStateActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 25), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassStateActive.setDescription(\"This object indicates whether or not the classifier\\nis enabled to classify packets to a Service Flow.\\n\\nIf the referenced parameter is not present in the\\nclassifier, the value of this object is reported\\nas true(1).\")\ndocsIetfQosPktClassPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 26), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassPkts.setDescription(\"This object counts the number of packets that have\\nbeen classified using this entry. 
This\\nincludes all packets delivered to a Service Flow\\nmaximum rate policing function, whether or not that\\nfunction drops the packets.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosPktClassBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 27), Bits().subtype(namedValues=NamedValues((\"rulePriority\", 0), (\"activationState\", 1), (\"destPortStart\", 10), (\"destPortEnd\", 11), (\"destMac\", 12), (\"sourceMac\", 13), (\"ethertype\", 14), (\"userPri\", 15), (\"vlanId\", 16), (\"ipTos\", 2), (\"ipProtocol\", 3), (\"ipSourceAddr\", 4), (\"ipSourceMask\", 5), (\"ipDestAddr\", 6), (\"ipDestMask\", 7), (\"sourcePortStart\", 8), (\"sourcePortEnd\", 9), ))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPktClassBitMap.setDescription(\"This object indicates which parameter encodings\\nwere actually present in the DOCSIS packet\\nclassifier encoding signaled in the DOCSIS message\\nthat created or modified the classifier. Note that\\nDynamic Service Change messages have replace\\nsemantics, so that all non-default parameters must\\nbe present whether the classifier is being created\\nor changed.\\n\\nA bit of this object is set to 1 if the parameter\\nindicated by the comment was present in the\\nclassifier encoding, and to 0 otherwise.\\n\\nNote that BITS are encoded most significant bit\\nfirst, so that if, for example, bits 6 and 7 are\\nset, this object is encoded as the octet string\\n'030000'H.\")\ndocsIetfQosParamSetTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 2))\nif mibBuilder.loadTexts: docsIetfQosParamSetTable.setDescription(\"This table describes the set of DOCSIS 1.1 and 2.0\\nQOS parameters defined in a managed device.\\n\\nThe ifIndex index specifies a DOCSIS MAC Domain.\\nThe docsIetfQosServiceFlowId index specifies a\\nparticular Service Flow.\\nThe docsIetfQosParamSetType index indicates whether\\nthe active, admitted, or provisioned QOS Parameter\\nSet is being described by the row.\\n\\nOnly the QOS Parameter Sets of DOCSIS 1.1 and 2.0\\nService Flows are represented in this table.\\n\\nDOCSIS 1.0 QOS service profiles are not\\nrepresented in this table.\\n\\nEach row corresponds to a DOCSIS QOS Parameter Set\\nas signaled via DOCSIS MAC management messages.\\nEach object in the row corresponds to one or\\npart of one DOCSIS 1.1 Service Flow Encoding.\\nThe docsIetfQosParamSetBitMap object in the row\\nindicates which particular parameters were signaled\\nin the original registration or dynamic service\\nrequest message that created the QOS Parameter Set.\\n\\nIn many cases, even if a QOS Parameter Set parameter\\nwas not signaled, the DOCSIS specification calls\\nfor a default value to be used. That default value\\nis reported as the value of the corresponding object\\nin this row.\\n\\nMany objects are not applicable, depending on\\nthe Service Flow direction or upstream scheduling\\ntype. 
The object value reported in this case\\nis specified in the DESCRIPTION clause.\")\ndocsIetfQosParamSetEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 2, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetType\"))\nif mibBuilder.loadTexts: docsIetfQosParamSetEntry.setDescription(\"A unique set of QOS parameters.\")\ndocsIetfQosParamSetServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 1), SnmpAdminString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetServiceClassName.setDescription(\"Refers to the Service Class Name from which the\\nparameter set values were derived.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is a zero-length string.\")\ndocsIetfQosParamSetPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetPriority.setDescription(\"The relative priority of a Service Flow.\\nHigher numbers indicate higher priority.\\nThis priority should only be used to differentiate\\n\\n\\n\\nService Flow from identical parameter sets.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0. If the parameter is\\nnot applicable, the reported value is 0.\")\ndocsIetfQosParamSetMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 3), DocsIetfQosBitRate()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficRate.setDescription(\"Maximum sustained traffic rate allowed for this\\nService Flow in bits/sec. Must count all MAC frame\\ndata PDU from the bytes following the MAC header\\nHCS to the end of the CRC. The number of bytes\\nforwarded is limited during any time interval.\\nThe value 0 means no maximum traffic rate is\\nenforced. This object applies to both upstream and\\ndownstream Service Flows.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0. If the parameter is\\nnot applicable, it is reported as 0.\")\ndocsIetfQosParamSetMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 4), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficBurst.setDescription(\"Specifies the token bucket size in bytes\\nfor this parameter set. The value is calculated\\nfrom the byte following the MAC header HCS to\\nthe end of the CRC. This object is applied in\\nconjunction with docsIetfQosParamSetMaxTrafficRate\\nto calculate maximum sustained traffic rate.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object for scheduling types\\nbestEffort (2), nonRealTimePollingService(3),\\nand realTimePollingService(4) is 3044.\\n\\nIf this parameter is not applicable, it is reported\\nas 0.\")\ndocsIetfQosParamSetMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 5), DocsIetfQosBitRate()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMinReservedRate.setDescription(\"Specifies the guaranteed minimum rate in\\nbits/sec for this parameter set. The value is\\ncalculated from the byte following the MAC\\nheader HCS to the end of the CRC. 
The default\\nvalue of 0 means that no bandwidth is reserved.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0. If the parameter\\nis not applicable, it is reported as 0.\")\ndocsIetfQosParamSetMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMinReservedPkt.setDescription(\"Specifies an assumed minimum packet size in\\nbytes for which the\\ndocsIetfQosParamSetMinReservedRate will be\\nprovided. The value is calculated from the byte\\nfollowing the MAC header HCS to the end of the\\nCRC.\\n\\nIf the referenced parameter is omitted from a\\nDOCSIS QOS parameter set, the default value is\\nCMTS implementation dependent. In this case, the\\nCMTS reports the default value it is using, and the\\nCM reports a value of 0. If the referenced\\nparameter is not applicable to the direction or\\nscheduling type of the Service Flow, both CMTS and\\nCM report this object's value as 0.\")\ndocsIetfQosParamSetActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetActiveTimeout.setDescription(\"Specifies the maximum duration in seconds that\\nresources remain unused on an active service\\nflow before CMTS signals that both active and\\nadmitted parameters set are null. The default\\nvalue of 0 signifies an infinite amount of time.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object is 0.\")\ndocsIetfQosParamSetAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetAdmittedTimeout.setDescription(\"Specifies the maximum duration in seconds that\\nresources remain in admitted state before\\nresources must be released.\\n\\nThe value of 0 signifies an infinite amount\\nof time.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the\\ndefault value of this object is 200.\")\ndocsIetfQosParamSetMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxConcatBurst.setDescription(\"Specifies the maximum concatenated burst in\\nbytes that an upstream Service Flow is allowed.\\nThe value is calculated from the FC byte of the\\nConcatenation MAC Header to the last CRC byte in\\nof the last concatenated MAC frame, inclusive.\\nThe value of 0 specifies no maximum burst.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set, the default\\nvalue of this object for scheduling types\\nbestEffort(2), nonRealTimePollingService(3), and\\n\\n\\n\\nrealTimePollingService(4) is 1522. 
If the parameter\\nis not applicable, this object's value is reported\\nas 0.\")\ndocsIetfQosParamSetSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 10), DocsIetfQosSchedulingType()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetSchedulingType.setDescription(\"Specifies the upstream scheduling service used for\\nupstream Service Flow.\\n\\nIf the referenced parameter is not present in the\\ncorresponding DOCSIS QOS Parameter Set of an\\nupstream Service Flow, the default value of this\\nobject is bestEffort(2). For QOS parameter sets of\\ndownstream Service Flows, this object's value is\\nreported as undefined(1).\")\ndocsIetfQosParamSetNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 11), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetNomPollInterval.setDescription(\"Specifies the nominal interval in microseconds\\nbetween successive unicast request\\nopportunities on an upstream Service Flow.\\n\\nThis object applies only to upstream Service Flows\\nwith DocsIetfQosSchedulingType of value\\nnonRealTimePollingService(3),\\nrealTimePollingService(4), and\\nunsolictedGrantServiceWithAD(5). The parameter is\\nmandatory for realTimePollingService(4). If the\\nparameter is omitted with\\nnonRealTimePollingService(3), the CMTS uses an\\nimplementation-dependent value. If the parameter\\nis omitted with unsolictedGrantServiceWithAD(5),\\nthe CMTS uses as a default value the value of the\\nNominal Grant Interval parameter. In all cases,\\nthe CMTS reports the value it is using when the\\nparameter is applicable. The CM reports the\\nsignaled parameter value if it was signaled,\\nand 0 otherwise.\\n\\n\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 12), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTolPollJitter.setDescription(\"Specifies the maximum amount of time in\\nmicroseconds that the unicast request interval\\nmay be delayed from the nominal periodic\\nschedule on an upstream Service Flow.\\n\\nThis parameter is applicable only to upstream\\nService Flows with a DocsIetfQosSchedulingType of\\nrealTimePollingService(4) or\\nunsolictedGrantServiceWithAD(5).\\n\\nIf the referenced parameter is applicable but not\\npresent in the corresponding DOCSIS QOS Parameter\\nSet, the CMTS uses an implementation-dependent\\nvalue and reports the value it is using.\\nThe CM reports a value of 0 in this case.\\n\\nIf the parameter is not applicable to the\\ndirection or upstream scheduling type of the\\nService Flow, both CMTS and CM report this\\nobject's value as 0.\")\ndocsIetfQosParamSetUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetUnsolicitGrantSize.setDescription(\"Specifies the unsolicited grant size in bytes.\\nThe grant size includes the entire MAC frame\\ndata PDU from the Frame Control byte to the end\\nof the MAC frame.\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\n\\n\\n\\nwhen applicable. 
Both CMTS and CM report\\nthe signaled value of the parameter in this\\ncase.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 14), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetNomGrantInterval.setDescription(\"Specifies the nominal interval in microseconds\\nbetween successive data grant opportunities\\non an upstream Service Flow.\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\nwhen applicable. Both CMTS and CM report the\\nsignaled value of the parameter in this case.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 15), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTolGrantJitter.setDescription(\"Specifies the maximum amount of time in\\nmicroseconds that the transmission opportunities\\nmay be delayed from the nominal periodic schedule.\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\nwhen applicable. Both CMTS and CM report the\\n\\n\\n\\nsignaled value of the parameter in this case.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetGrantsPerInterval.setDescription(\"Specifies the number of data grants per Nominal\\nGrant Interval\\n(docsIetfQosParamSetNomGrantInterval).\\n\\nThe referenced parameter is applicable only\\nfor upstream flows with a DocsIetfQosSchedulingType\\nof unsolicitedGrantServicewithAD(5) or\\nunsolicitedGrantService(6), and it is mandatory\\nwhen applicable. Both CMTS and CM report the\\nsignaled value of the parameter in this case.\\n\\nIf the referenced parameter is not applicable to\\nthe direction or scheduling type of the\\ncorresponding DOCSIS QOS Parameter Set, both\\nCMTS and CM report this object's value as 0.\")\ndocsIetfQosParamSetTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTosAndMask.setDescription(\"Specifies the AND mask for the IP TOS byte for\\noverwriting IP packet's TOS value. 
The IP packet\\nTOS byte is bitwise ANDed with\\ndocsIetfQosParamSetTosAndMask, and the result is\\nbitwise ORed with docsIetfQosParamSetTosORMask and\\nthe result is written to the IP packet TOS byte.\\nA value of 'FF'H for docsIetfQosParamSetTosAndMask\\nand a value of '00'H for\\ndocsIetfQosParamSetTosOrMask means that the IP\\nPacket TOS byte is not overwritten.\\n\\nThis combination is reported if the referenced\\nparameter is not present in a QOS Parameter Set.\\n\\n\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). Network operators SHOULD avoid\\nspecifying values of docsIetfQosParamSetTosAndMask\\nand docsIetfQosParamSetTosORMask that would result\\nin the modification of the ECN bits.\\n\\nIn particular, operators should not use values of\\ndocsIetfQosParamSetTosAndMask that have either of\\nthe least-significant two bits set to 0. Similarly,\\noperators should not use values of\\ndocsIetfQosParamSetTosORMask that have either of\\nthe least-significant two bits set to 1.\\n\\nEven though this object is only enforced by the\\nCable Modem Termination System (CMTS),\\nCable Modems MUST report the value as signaled in\\nthe referenced parameter.\")\ndocsIetfQosParamSetTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetTosOrMask.setDescription(\"Specifies the OR mask for the IP TOS byte.\\n\\nSee the description of docsIetfQosParamSetTosAndMask\\nfor further details.\\n\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). Network operators SHOULD avoid\\nspecifying values of docsIetfQosParamSetTosAndMask\\nand docsIetfQosParamSetTosORMask that would result\\nin the modification of the ECN bits.\")\ndocsIetfQosParamSetMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 19), Unsigned32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetMaxLatency.setDescription(\"Specifies the maximum latency between the\\nreception of a packet by the CMTS on its NSI\\nand the forwarding of the packet to the RF\\ninterface. A value of 0 signifies no maximum\\nlatency is enforced. This object only applies to\\ndownstream Service Flows.\\n\\nIf the referenced parameter is not present in the\\ncorresponding downstream DOCSIS QOS Parameter Set,\\nthe default value is 0. This parameter is\\nnot applicable to upstream DOCSIS QOS Parameter\\nSets, and its value is reported as 0 in this case.\")\ndocsIetfQosParamSetType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(1,3,2,)).subtype(namedValues=NamedValues((\"active\", 1), (\"admitted\", 2), (\"provisioned\", 3), ))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosParamSetType.setDescription(\"Defines the type of the QOS parameter set defined\\nby this row. active(1) indicates the Active QOS\\nparameter set, describing the service currently\\nbeing provided by the DOCSIS MAC domain to the\\nService Flow. admitted(2) indicates the Admitted\\nQOS Parameter Set, describing services reserved by\\nthe DOCSIS MAC domain for use by the service\\nflow. 
provisioned (3) describes the QOS Parameter\\nSet defined in the DOCSIS CM Configuration file for\\nthe Service Flow.\")\ndocsIetfQosParamSetRequestPolicyOct = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetRequestPolicyOct.setDescription(\"Specifies which transmit interval opportunities\\nthe CM omits for upstream transmission requests and\\npacket transmissions. This object takes its\\ndefault value for downstream Service Flows.\\n\\nUnless otherwise indicated, a bit value of 1 means\\nthat a CM must not use that opportunity for\\nupstream transmission.\\n\\nIf bit 0 is the least significant bit of the\\nleast significant (4th) octet, and if bit number\\nis increased with significance, the bit definitions\\nare defined as follows:\\n\\nbroadcastReqOpp(0):\\n all CMs broadcast request opportunities\\n\\npriorityReqMulticastReq(1):\\n priority request multicast request\\n opportunities\\n\\nreqDataForReq(2):\\n request/data opportunities for requests\\n\\nreqDataForData(3):\\n request/data opportunities for data\\n\\npiggybackReqWithData(4):\\n piggyback requests with data\\n\\nconcatenateData(5):\\n concatenate data\\n\\nfragmentData(6):\\n fragment data\\n\\nsuppresspayloadheaders(7):\\n suppress payload headers\\n\\n\\n\\n\\ndropPktsExceedUGSize(8):\\n A value of 1 means that the Service Flow must\\n drop packets that do not fit in the Unsolicited\\n Grant size.\\n\\nIf the referenced parameter is not present in\\na QOS Parameter Set, the value of this object is\\nreported as '00000000'H.\")\ndocsIetfQosParamSetBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 22), Bits().subtype(namedValues=NamedValues((\"trafficPriority\", 0), (\"maxTrafficRate\", 1), (\"nomPollInterval\", 10), (\"tolPollJitter\", 11), (\"unsolicitGrantSize\", 12), (\"nomGrantInterval\", 13), (\"tolGrantJitter\", 14), (\"grantsPerInterval\", 15), (\"tosOverwrite\", 16), (\"maxLatency\", 17), (\"maxTrafficBurst\", 2), (\"minReservedRate\", 3), (\"minReservedPkt\", 4), (\"activeTimeout\", 5), (\"admittedTimeout\", 6), (\"maxConcatBurst\", 7), (\"schedulingType\", 8), (\"requestPolicy\", 9), ))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosParamSetBitMap.setDescription(\"This object indicates the set of QOS Parameter\\nSet parameters actually signaled in the\\nDOCSIS registration or dynamic service request\\nmessage that created or modified the QOS Parameter\\nSet. A bit is set to 1 when the parameter described\\nby the indicated reference section is present\\nin the original request.\\n\\nNote that when Service Class names are expanded,\\nthe registration or dynamic response message may\\ncontain parameters as expanded by the CMTS based\\n\\n\\n\\non a stored service class. These expanded\\nparameters are not indicated by a 1 bit in this\\nobject.\\n\\nNote that even though some QOS Parameter Set\\nparameters may not be signaled in a message\\n(so that the paramater's bit in this object is 0),\\nthe DOCSIS specification requires that default\\nvalues be used. These default values are reported\\nas the corresponding object's value in the row.\\n\\nNote that BITS objects are encoded most\\nsignificant bit first. 
For example, if bits\\n1 and 16 are set, the value of this object\\nis the octet string '400080'H.\")\ndocsIetfQosServiceFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 3))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowTable.setDescription(\"This table describes the set of DOCSIS-QOS\\nService Flows in a managed device.\")\ndocsIetfQosServiceFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 3, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowEntry.setDescription(\"Describes a Service Flow.\\nAn entry in the table exists for each\\nService Flow ID. The ifIndex is an\\nifType of docsCableMaclayer(127).\")\ndocsIetfQosServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowId.setDescription(\"An index assigned to a Service Flow by CMTS.\")\ndocsIetfQosServiceFlowSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16383))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowSID.setDescription(\"Service Identifier (SID) assigned to an\\nadmitted or active Service Flow. This object\\nreports a value of 0 if a Service ID is not\\nassociated with the Service Flow. Only active\\nor admitted upstream Service Flows will have a\\nService ID (SID).\")\ndocsIetfQosServiceFlowDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 3), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowDirection.setDescription(\"The direction of the Service Flow.\")\ndocsIetfQosServiceFlowPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 4), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPrimary.setDescription(\"Object reflects whether Service Flow is the primary\\nor a secondary Service Flow.\\n\\nA primary Service Flow is the default Service Flow\\nfor otherwise unclassified traffic and all MAC\\nmessages.\")\ndocsIetfQosServiceFlowStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 4))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowStatsTable.setDescription(\"This table describes statistics associated with the\\nService Flows in a managed device.\")\ndocsIetfQosServiceFlowStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 4, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowStatsEntry.setDescription(\"Describes a set of Service Flow statistics.\\nAn entry in the table exists for each\\nService Flow ID. The ifIndex is an\\nifType of docsCableMaclayer(127).\")\ndocsIetfQosServiceFlowPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 1), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPkts.setDescription(\"For outgoing Service Flows, this object counts the\\nnumber of Packet Data PDUs forwarded to this\\nService Flow. For incoming upstream CMTS service\\nflows, this object counts the number of Packet\\nData PDUs actually received on the Service Flow\\nidentified by the SID for which the packet was\\nscheduled. CMs not classifying downstream packets\\nmay report this object's value as 0 for downstream\\nService Flows. 
This object does not count\\nMAC-specific management messages.\\n\\nParticularly for UGS flows, packets sent on the\\nprimary Service Flow in violation of the UGS grant\\nsize should be counted only by the instance of this\\nobject that is associated with the primary service\\n\\n\\n\\nflow.\\n\\nUnclassified upstream user data packets (i.e., non-\\nMAC-management) forwarded to the primary upstream\\nService Flow should be counted by the instance of\\nthis object that is associated with the primary\\nservice flow.\\n\\nThis object does include packets counted by\\ndocsIetfQosServiceFlowPolicedDelayPkts, but does not\\ninclude packets counted by\\ndocsIetfQosServiceFlowPolicedDropPkts\\nand docsIetfQosServiceFlowPHSUnknowns.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 2), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowOctets.setDescription(\"The number of octets from the byte after the MAC\\nheader HCS to the end of the CRC for all packets\\ncounted in the docsIetfQosServiceFlowPkts object for\\nthis row. Note that this counts the octets after\\npayload header suppression and before payload\\nheader expansion have been applied.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 3), TimeStamp()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowTimeCreated.setDescription(\"The value of sysUpTime when the service flow\\nwas created.\")\ndocsIetfQosServiceFlowTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 4), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowTimeActive.setDescription(\"The number of seconds that the service flow\\nhas been active.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowPHSUnknowns = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 5), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPHSUnknowns.setDescription(\"For incoming upstream CMTS service flows, this\\nobject counts the number of packets received\\nwith an unknown payload header suppression index.\\nThe service flow is identified by the SID for which\\nthe packet was scheduled.\\n\\nOn a CM, only this object's instance for the primary\\ndownstream service flow counts packets received with\\nan unknown payload header suppression index. 
All\\nother downstream service flows on CM report this\\nobjects value as 0.\\n\\nAll outgoing service flows report this object's\\nvalue as 0.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 6), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDropPkts.setDescription(\"For outgoing service flows, this object counts the\\nnumber of Packet Data PDUs classified to this\\nservice flow dropped due to:\\n (1) implementation-dependent excessive delay\\n while enforcing the Maximum Sustained\\n Traffic Rate; or\\n (2) UGS packets dropped due to exceeding the\\n Unsolicited Grant Size with a\\n Request/Transmission policy that requires\\n such packets to be dropped.\\n\\nClassified packets dropped due to other reasons\\n\\n\\n\\nmust be counted in ifOutDiscards for the interface\\nof this service flow. This object reports 0 for\\nincoming service flows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 7), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDelayPkts.setDescription(\"This object counts only outgoing packets delayed in\\norder to maintain the Maximum Sustained Traffic\\nRate. This object will always report a value of 0\\nfor UGS flows because the Maximum Sustained Traffic\\nRate does not apply. This object is 0 for incoming\\nservice flows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosUpstreamStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 5))\nif mibBuilder.loadTexts: docsIetfQosUpstreamStatsTable.setDescription(\"This table describes statistics associated with\\nupstream service flows. All counted frames must\\nbe received without a Frame Check Sequence (FCS)\\nerror.\")\ndocsIetfQosUpstreamStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 5, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosSID\"))\nif mibBuilder.loadTexts: docsIetfQosUpstreamStatsEntry.setDescription(\"Describes a set of upstream service flow\\nstatistics. 
An entry in the table exists for each\\nupstream Service Flow in a managed device.\\nThe ifIndex is an ifType of\\ndocsCableMaclayer(127).\")\ndocsIetfQosSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 16383))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosSID.setDescription(\"Identifies a service ID for an admitted or active\\nupstream service flow.\")\ndocsIetfQosUpstreamFragments = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosUpstreamFragments.setDescription(\"The number of fragmentation headers received on an\\nupstream service flow, regardless of whether\\nthe fragment was correctly reassembled into a\\nvalid packet.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosUpstreamFragDiscards = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 3), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosUpstreamFragDiscards.setDescription(\"The number of upstream fragments discarded and not\\nassembled into a valid upstream packet.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosUpstreamConcatBursts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 4), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosUpstreamConcatBursts.setDescription(\"The number of concatenation headers received on an\\nupstream service flow.\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicServiceStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 6))\nif mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsTable.setDescription(\"This table describes statistics associated with the\\nDynamic Service Flows in a managed device.\")\ndocsIetfQosDynamicServiceStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 6, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosIfDirection\"))\nif mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsEntry.setDescription(\"Describes a set of dynamic service flow statistics.\\nTwo entries exist for each DOCSIS MAC layer\\ninterface for the upstream and downstream\\ndirection. On the CMTS, the downstream direction\\nrow indicates messages transmitted or transactions\\noriginated by the CMTS. The upstream direction row\\nindicates messages received or transaction\\noriginated by the CM. On the CM, the downstream\\ndirection row indicates messages received or\\ntransactions originated by the CMTS. 
The upstream\\ndirection row indicates messages transmitted by\\nthe CM or transactions originated by the CM.\\nThe ifIndex is an ifType of\\ndocsCableMaclayer(127).\")\ndocsIetfQosIfDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 1), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosIfDirection.setDescription(\"The direction of interface.\")\ndocsIetfQosDSAReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 2), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSAReqs.setDescription(\"The number of Dynamic Service Addition Requests,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSARsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 3), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSARsps.setDescription(\"The number of Dynamic Service Addition Responses,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDSAAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 4), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSAAcks.setDescription(\"The number of Dynamic Service Addition\\nAcknowledgements, including retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 5), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSCReqs.setDescription(\"The number of Dynamic Service Change Requests,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 6), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSCRsps.setDescription(\"The number of Dynamic Service Change Responses,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 7), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSCAcks.setDescription(\"The number of Dynamic Service Change\\nAcknowledgements, including retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDSDReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 8), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSDReqs.setDescription(\"The number of Dynamic Service Delete Requests,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDSDRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 9), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDSDRsps.setDescription(\"The number of Dynamic Service Delete Responses,\\nincluding retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicAdds = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 10), Counter32()).setMaxAccess(\"readonly\")\nif 
mibBuilder.loadTexts: docsIetfQosDynamicAdds.setDescription(\"The number of successful Dynamic Service Addition\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicAddFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 11), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicAddFails.setDescription(\"The number of failed Dynamic Service Addition\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDynamicChanges = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 12), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicChanges.setDescription(\"The number of successful Dynamic Service Change\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicChangeFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 13), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicChangeFails.setDescription(\"The number of failed Dynamic Service Change\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicDeletes = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 14), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicDeletes.setDescription(\"The number of successful Dynamic Service Delete\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDynamicDeleteFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 15), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDynamicDeleteFails.setDescription(\"The number of failed Dynamic Service Delete\\ntransactions.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\n\\n\\n\\nindexes this object.\")\ndocsIetfQosDCCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 16), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCReqs.setDescription(\"The number of Dynamic Channel Change Request\\nmessages traversing an interface. This count\\nis nonzero only on downstream direction rows.\\nThis count should include the number of retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex\\nthat indexes this object.\")\ndocsIetfQosDCCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 17), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCRsps.setDescription(\"The number of Dynamic Channel Change Response\\nmessages traversing an interface. This count is\\nnonzero only on upstream direction rows. This count\\nshould include the number of retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDCCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 18), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCAcks.setDescription(\"The number of Dynamic Channel Change Acknowledgement\\nmessages traversing an interface. 
This count\\nis nonzero only on downstream direction rows.\\nThis count should include the number of retries.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDCCs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 19), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCs.setDescription(\"The number of successful Dynamic Channel Change\\ntransactions. This count is nonzero only on\\ndownstream direction rows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosDCCFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 20), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosDCCFails.setDescription(\"The number of failed Dynamic Channel Change\\ntransactions. This count is nonzero only on\\ndownstream direction rows.\\n\\nThis counter's last discontinuity is the\\nifCounterDiscontinuityTime for the same ifIndex that\\nindexes this object.\")\ndocsIetfQosServiceFlowLogTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 7))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogTable.setDescription(\"This table contains a log of the disconnected\\nService Flows in a managed device.\")\ndocsIetfQosServiceFlowLogEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 7, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogIndex\"))\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogEntry.setDescription(\"The information regarding a single disconnected\\nservice flow.\")\ndocsIetfQosServiceFlowLogIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogIndex.setDescription(\"Unique index for a logged service flow.\")\ndocsIetfQosServiceFlowLogIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 2), InterfaceIndex()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogIfIndex.setDescription(\"The ifIndex of ifType docsCableMaclayer(127)\\non the CMTS where the service flow was present.\")\ndocsIetfQosServiceFlowLogSFID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogSFID.setDescription(\"The index assigned to the service flow by the CMTS.\")\ndocsIetfQosServiceFlowLogCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 4), MacAddress()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogCmMac.setDescription(\"The MAC address for the cable modem associated with\\nthe service flow.\")\ndocsIetfQosServiceFlowLogPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 5), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPkts.setDescription(\"The number of packets counted on this service flow\\nafter payload header suppression.\")\ndocsIetfQosServiceFlowLogOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 6), Counter64()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogOctets.setDescription(\"The number of octets counted on this service flow\\nafter payload header suppression.\")\ndocsIetfQosServiceFlowLogTimeDeleted = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 7), TimeStamp()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: 
docsIetfQosServiceFlowLogTimeDeleted.setDescription(\"The value of sysUpTime when the service flow\\nwas deleted.\")\ndocsIetfQosServiceFlowLogTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 8), TimeStamp()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeCreated.setDescription(\"The value of sysUpTime when the service flow\\nwas created.\")\ndocsIetfQosServiceFlowLogTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 9), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeActive.setDescription(\"The total time that the service flow was active.\")\ndocsIetfQosServiceFlowLogDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 10), DocsIetfQosRfMacIfDirection()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogDirection.setDescription(\"The value of docsIetfQosServiceFlowDirection\\nfor the service flow.\")\ndocsIetfQosServiceFlowLogPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 11), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPrimary.setDescription(\"The value of docsIetfQosServiceFlowPrimary for the\\nservice flow.\")\ndocsIetfQosServiceFlowLogServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 12), SnmpAdminString()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogServiceClassName.setDescription(\"The value of docsIetfQosParamSetServiceClassName for\\nthe provisioned QOS Parameter Set of the\\nservice flow.\")\ndocsIetfQosServiceFlowLogPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 13), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDropPkts.setDescription(\"The final value of\\ndocsIetfQosServiceFlowPolicedDropPkts for the\\nservice flow.\")\ndocsIetfQosServiceFlowLogPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 14), Counter32()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription(\"The final value of\\ndocsIetfQosServiceFlowPolicedDelayPkts for the\\nservice flow.\")\ndocsIetfQosServiceFlowLogControl = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 15), Integer().subtype(subtypeSpec=SingleValueConstraint(1,6,)).subtype(namedValues=NamedValues((\"active\", 1), (\"destroy\", 6), ))).setMaxAccess(\"readwrite\")\nif mibBuilder.loadTexts: docsIetfQosServiceFlowLogControl.setDescription(\"Setting this object to the value destroy(6) removes\\nthis entry from the table.\\n\\nReading this object returns the value active(1).\")\ndocsIetfQosServiceClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 8))\nif mibBuilder.loadTexts: docsIetfQosServiceClassTable.setDescription(\"This table describes the set of DOCSIS-QOS\\nService Classes in a CMTS.\")\ndocsIetfQosServiceClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 8, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassName\"))\nif mibBuilder.loadTexts: docsIetfQosServiceClassEntry.setDescription(\"A provisioned service class on a CMTS.\\nEach entry defines a template for certain\\nDOCSIS QOS Parameter Set values. When a CM\\ncreates or modifies an Admitted QOS Parameter Set\\nfor a Service Flow, it may reference a Service Class\\nName instead of providing explicit QOS Parameter\\nSet values. 
In this case, the CMTS populates\\nthe QOS Parameter Set with the applicable\\ncorresponding values from the named Service Class.\\nSubsequent changes to a Service Class row do not\\naffect the QOS Parameter Set values of any service\\nflows already admitted.\\n\\nA service class template applies to only\\na single direction, as indicated in the\\ndocsIetfQosServiceClassDirection object.\")\ndocsIetfQosServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassName.setDescription(\"Service Class Name. DOCSIS specifies that the\\nmaximum size is 16 ASCII characters including\\na terminating zero. The terminating zero is not\\nrepresented in this SnmpAdminString syntax object.\")\ndocsIetfQosServiceClassStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 2), RowStatus()).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassStatus.setDescription(\"Used to create or delete rows in this table.\\nThere is no restriction on the ability to change\\nvalues in this row while the row is active.\\nInactive rows need not be timed out.\")\ndocsIetfQosServiceClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPriority.setDescription(\"Template for docsIetfQosParamSetPriority.\")\ndocsIetfQosServiceClassMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 4), DocsIetfQosBitRate().clone('0')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficRate.setDescription(\"Template for docsIetfQosParamSetMaxTrafficRate.\")\ndocsIetfQosServiceClassMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 5), Unsigned32().clone(3044)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficBurst.setDescription(\"Template for docsIetfQosParamSetMaxTrafficBurst.\")\ndocsIetfQosServiceClassMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 6), DocsIetfQosBitRate().clone('0')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedRate.setDescription(\"Template for docsIetfQosParamSEtMinReservedRate.\")\ndocsIetfQosServiceClassMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedPkt.setDescription(\"Template for docsIetfQosParamSetMinReservedPkt.\")\ndocsIetfQosServiceClassMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(1522)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxConcatBurst.setDescription(\"Template for docsIetfQosParamSetMaxConcatBurst.\")\ndocsIetfQosServiceClassNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 9), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassNomPollInterval.setDescription(\"Template for docsIetfQosParamSetNomPollInterval.\")\ndocsIetfQosServiceClassTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 10), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: 
docsIetfQosServiceClassTolPollJitter.setDescription(\"Template for docsIetfQosParamSetTolPollJitter.\")\ndocsIetfQosServiceClassUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassUnsolicitGrantSize.setDescription(\"Template for docsIetfQosParamSetUnsolicitGrantSize.\")\ndocsIetfQosServiceClassNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 12), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassNomGrantInterval.setDescription(\"Template for docsIetfQosParamSetNomGrantInterval.\")\ndocsIetfQosServiceClassTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 13), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassTolGrantJitter.setDescription(\"Template for docsIetfQosParamSetTolGrantJitter.\")\ndocsIetfQosServiceClassGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassGrantsPerInterval.setDescription(\"Template for docsIetfQosParamSetGrantsPerInterval.\")\ndocsIetfQosServiceClassMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 15), Unsigned32().clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassMaxLatency.setDescription(\"Template for docsIetfQosParamSetClassMaxLatency.\")\ndocsIetfQosServiceClassActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassActiveTimeout.setDescription(\"Template for docsIetfQosParamSetActiveTimeout.\")\ndocsIetfQosServiceClassAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassAdmittedTimeout.setDescription(\"Template for docsIetfQosParamSetAdmittedTimeout.\")\ndocsIetfQosServiceClassSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 18), DocsIetfQosSchedulingType().clone('bestEffort')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassSchedulingType.setDescription(\"Template for docsIetfQosParamSetSchedulingType.\")\ndocsIetfQosServiceClassRequestPolicy = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4).clone(hexValue='00000000')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassRequestPolicy.setDescription(\"Template for docsIetfQosParamSetRequestPolicyOct.\")\ndocsIetfQosServiceClassTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 20), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassTosAndMask.setDescription(\"Template for docsIetfQosParamSetTosAndMask.\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). 
Network operators SHOULD avoid\\nspecifying values of\\ndocsIetfQosServiceClassTosAndMask and\\ndocsIetfQosServiceClassTosOrMask that would result\\nin the modification of the ECN bits.\\n\\n\\n\\nIn particular, operators should not use values of\\ndocsIetfQosServiceClassTosAndMask that have either\\nof the least-significant two bits set to 0.\\nSimilarly,operators should not use values of\\ndocsIetfQosServiceClassTosOrMask that have either\\nof the least-significant two bits set to 1.\")\ndocsIetfQosServiceClassTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassTosOrMask.setDescription(\"Template for docsIetfQosParamSetTosOrMask.\\nThe IP TOS octet as originally defined in RFC 791\\nhas been superseded by the 6-bit Differentiated\\nServices Field (DSField, RFC 3260) and the 2-bit\\nExplicit Congestion Notification Field (ECN field,\\nRFC 3168). Network operators SHOULD avoid\\nspecifying values of\\ndocsIetfQosServiceClassTosAndMask and\\ndocsIetfQosServiceClassTosOrMask that would result\\nin the modification of the ECN bits.\\n\\nIn particular, operators should not use values of\\ndocsIetfQosServiceClassTosAndMask that have either\\nof the least-significant two bits set to 0.\\nSimilarly, operators should not use values of\\ndocsIetfQosServiceClassTosOrMask that have either\\nof the least-significant two bits set to 1.\")\ndocsIetfQosServiceClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 22), DocsIetfQosRfMacIfDirection().clone('upstream')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassDirection.setDescription(\"Specifies whether the service class template\\napplies to upstream or downstream service flows.\")\ndocsIetfQosServiceClassStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 23), StorageType().clone('nonVolatile')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassStorageType.setDescription(\"This object defines whether this row is kept in\\nvolatile storage and lost upon reboot or whether\\nit is backed up by non-volatile or permanent\\nstorage. 'permanent' entries need not allow\\nwritable access to any object.\")\ndocsIetfQosServiceClassDSCPOverwrite = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 24), DscpOrAny().clone('-1')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassDSCPOverwrite.setDescription(\"This object allows the overwrite of the DSCP\\nfield per RFC 3260.\\n\\nIf this object is -1, then the corresponding entry's\\ndocsIetfQosServiceClassTosAndMask value MUST be\\n'FF'H and docsIetfQosServiceClassTosOrMask MUST be\\n'00'H. Otherwise, this object is in the range of\\n0..63, and the corresponding entry's\\ndocsIetfQosServiceClassTosAndMask value MUST be\\n'03'H and the docsIetfQosServiceClassTosOrMask MUST\\nbe this object's value shifted left by two bit\\npositions.\")\ndocsIetfQosServiceClassPolicyTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 9))\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyTable.setDescription(\"This table describes the set of DOCSIS-QOS\\nService Class Policies.\\n\\nThis table is an adjunct to the\\n\\n\\n\\ndocsDevFilterPolicy table. 
Entries in the\\ndocsDevFilterPolicy table can point to\\nspecific rows in this table.\\n\\nThis table permits mapping a packet to a service\\nclass name of an active service flow so long as\\na classifier does not exist at a higher\\npriority.\")\ndocsIetfQosServiceClassPolicyEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 9, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyIndex\"))\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyEntry.setDescription(\"A service class name policy entry.\")\ndocsIetfQosServiceClassPolicyIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyIndex.setDescription(\"Index value to identify an entry in\\nthis table uniquely.\")\ndocsIetfQosServiceClassPolicyName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 2), SnmpAdminString()).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyName.setDescription(\"Service Class Name to identify the name of the\\nservice class flow to which the packet should be\\ndirected.\")\ndocsIetfQosServiceClassPolicyRulePriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyRulePriority.setDescription(\"Service Class Policy rule priority for the\\nentry.\")\ndocsIetfQosServiceClassPolicyStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 4), RowStatus()).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStatus.setDescription(\"Used to create or delete rows in this table.\\nThis object should not be deleted if it is\\nreferenced by an entry in docsDevFilterPolicy.\\nThe reference should be deleted first.\\nThere is no restriction on the ability\\nto change values in this row while the row is\\nactive. Inactive rows need not be timed out.\")\ndocsIetfQosServiceClassPolicyStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess(\"readcreate\")\nif mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStorageType.setDescription(\"This object defines whether this row is kept in\\nvolatile storage and lost upon reboot or whether\\nit is backed up by non-volatile or permanent\\nstorage. 
'permanent' entries need not allow\\nwritable access to any object.\")\ndocsIetfQosPHSTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 10))\nif mibBuilder.loadTexts: docsIetfQosPHSTable.setDescription(\"This table describes the set of payload header\\nsuppression entries.\")\ndocsIetfQosPHSEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 10, 1)).setIndexNames((0, \"IF-MIB\", \"ifIndex\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowId\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassId\"))\nif mibBuilder.loadTexts: docsIetfQosPHSEntry.setDescription(\"A payload header suppression entry.\\n\\nThe ifIndex is an ifType of docsCableMaclayer(127).\\nThe index docsIetfQosServiceFlowId selects one\\nservice flow from the cable MAC layer interface.\\nThe docsIetfQosPktClassId index matches an\\nindex of the docsIetfQosPktClassTable.\")\ndocsIetfQosPHSField = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSField.setDescription(\"Payload header suppression field defines the\\nbytes of the header that must be\\nsuppressed/restored by the sending/receiving\\ndevice.\\n\\nThe number of octets in this object should be\\nthe same as the value of docsIetfQosPHSSize.\")\ndocsIetfQosPHSMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSMask.setDescription(\"Payload header suppression mask defines the\\nbit mask that is used in combination with the\\ndocsIetfQosPHSField. It defines which bytes in\\nthe header must be suppressed/restored by the\\nsending or receiving device.\\n\\nEach bit of this bit mask corresponds to a byte\\nin the docsIetfQosPHSField, with the least\\n\\n\\n\\nsignificant bit corresponding to the first byte\\nof the docsIetfQosPHSField.\\n\\nEach bit of the bit mask specifies whether\\nthe corresponding byte should be suppressed\\nin the packet. A bit value of '1' indicates that\\nthe byte should be suppressed by the sending\\ndevice and restored by the receiving device.\\nA bit value of '0' indicates that\\nthe byte should not be suppressed by the sending\\ndevice or restored by the receiving device.\\n\\nIf the bit mask does not contain a bit for each\\nbyte in the docsIetfQosPHSField, then the bit mask\\nis extended with bit values of '1' to be the\\nnecessary length.\")\ndocsIetfQosPHSSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSSize.setDescription(\"Payload header suppression size specifies the\\nnumber of bytes in the header to be suppressed\\nand restored.\\n\\nThe value of this object must match the number\\nof bytes in the docsIetfQosPHSField.\")\ndocsIetfQosPHSVerify = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 4), TruthValue()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSVerify.setDescription(\"Payload header suppression verification value. 
If\\n'true', the sender must verify docsIetfQosPHSField\\nis the same as what is contained in the packet\\nto be suppressed.\")\ndocsIetfQosPHSIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosPHSIndex.setDescription(\"Payload header suppression index uniquely\\n\\n\\n\\nreferences the PHS rule for a given service flow.\")\ndocsIetfQosCmtsMacToSrvFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 11))\nif mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowTable.setDescription(\"This table provides for referencing the service\\nflows associated with a particular cable modem.\\nThis allows indexing into other docsIetfQos\\ntables that are indexed by docsIetfQosServiceFlowId\\nand ifIndex.\")\ndocsIetfQosCmtsMacToSrvFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 11, 1)).setIndexNames((0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsCmMac\"), (0, \"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsServiceFlowId\"))\nif mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowEntry.setDescription(\"An entry is created by CMTS for each service flow\\nconnected to this CMTS.\")\ndocsIetfQosCmtsCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 1), MacAddress()).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosCmtsCmMac.setDescription(\"The MAC address for the referenced CM.\")\ndocsIetfQosCmtsServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess(\"noaccess\")\nif mibBuilder.loadTexts: docsIetfQosCmtsServiceFlowId.setDescription(\"An index assigned to a service flow by CMTS.\")\ndocsIetfQosCmtsIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 3), InterfaceIndex()).setMaxAccess(\"readonly\")\nif mibBuilder.loadTexts: docsIetfQosCmtsIfIndex.setDescription(\"The ifIndex of ifType docsCableMacLayer(127)\\non the CMTS that is connected to the Cable Modem.\")\ndocsIetfQosConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2))\ndocsIetfQosGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 1))\ndocsIetfQosCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 2))\n\n# Augmentions\n\n# Groups\n\ndocsIetfQosBaseGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 1)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassUserPriLow\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassSourcePortStart\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassEnetProtocol\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpTosMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetDestAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowTimeActive\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowTimeCreated\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassStateActive\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSAReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSCAcks\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetDestMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestPortStart\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetSourceMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSDRsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassPriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSVerify\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSIndex\"), (\"DOCS-IETF-QOS-MIB\", 
\"docsIetfQosDSARsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassEnetProtocolType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpTosLow\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetSourceAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSField\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSCReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicChangeFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSDReqs\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestPortEnd\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicAdds\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassVlanId\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicDeleteFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicDeletes\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpProtocol\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowSID\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPHSUnknowns\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPrimary\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPHSSize\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassSourcePortEnd\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSAAcks\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowOctets\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCRsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassUserPriHigh\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDSCRsps\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPolicedDelayPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowPolicedDropPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassIpTosHigh\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassSourceMacAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestMacMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassDestMacAddr\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassBitMap\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicAddFails\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDCCAcks\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosPktClassInetAddressType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosDynamicChanges\"), ) )\nif mibBuilder.loadTexts: docsIetfQosBaseGroup.setDescription(\"Group of objects implemented in both Cable Modems and\\nCable Modem Termination Systems.\")\ndocsIetfQosParamSetGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 2)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxConcatBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetGrantsPerInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxTrafficRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetActiveTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMinReservedPkt\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetPriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetRequestPolicyOct\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetServiceClassName\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTosOrMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMinReservedRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxTrafficBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetBitMap\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetSchedulingType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTolPollJitter\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTosAndMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetMaxLatency\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetTolGrantJitter\"), 
(\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetNomPollInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetNomGrantInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetAdmittedTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetUnsolicitGrantSize\"), ) )\nif mibBuilder.loadTexts: docsIetfQosParamSetGroup.setDescription(\"Group of objects implemented in both Cable Modems and\\nCable Modem Termination Systems for QOS Parameter Sets.\")\ndocsIetfQosCmtsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 3)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogSFID\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosUpstreamFragDiscards\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPolicedDropPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogControl\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogTimeCreated\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogOctets\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosUpstreamConcatBursts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogCmMac\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPrimary\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsIfIndex\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosUpstreamFragments\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogTimeActive\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogIfIndex\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogPolicedDelayPkts\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogServiceClassName\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceFlowLogTimeDeleted\"), ) )\nif mibBuilder.loadTexts: docsIetfQosCmtsGroup.setDescription(\"Group of objects implemented only in the CMTS.\")\ndocsIetfQosSrvClassPolicyGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 4)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyStorageType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyName\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyRulePriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPolicyStatus\"), ) )\nif mibBuilder.loadTexts: docsIetfQosSrvClassPolicyGroup.setDescription(\"Group of objects implemented in both Cable Modems and\\nCable Modem Termination Systems when supporting policy-based\\nservice flows.\")\ndocsIetfQosServiceClassGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 5)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassSchedulingType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassNomGrantInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTolGrantJitter\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassDSCPOverwrite\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassGrantsPerInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassDirection\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxTrafficBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassPriority\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxTrafficRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassStorageType\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTolPollJitter\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTosOrMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassStatus\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxConcatBurst\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassTosAndMask\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassUnsolicitGrantSize\"), 
(\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassNomPollInterval\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassRequestPolicy\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMinReservedRate\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassActiveTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMinReservedPkt\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassAdmittedTimeout\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassMaxLatency\"), ) )\nif mibBuilder.loadTexts: docsIetfQosServiceClassGroup.setDescription(\"Group of objects implemented only in Cable Modem\\nTermination Systems when supporting expansion of Service\\nClass Names in a QOS Parameter Set\")\n\n# Compliances\n\ndocsIetfQosCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 127, 2, 2, 1)).setObjects(*((\"DOCS-IETF-QOS-MIB\", \"docsIetfQosCmtsGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosServiceClassGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosSrvClassPolicyGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosBaseGroup\"), (\"DOCS-IETF-QOS-MIB\", \"docsIetfQosParamSetGroup\"), ) )\nif mibBuilder.loadTexts: docsIetfQosCompliance.setDescription(\"The compliance statement for MCNS Cable Modems and\\nCable Modem Termination Systems that implement DOCSIS\\nService Flows.\")\n\n# Exports\n\n# Module identity\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", PYSNMP_MODULE_ID=docsIetfQosMIB)\n\n# Types\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", DocsIetfQosBitRate=DocsIetfQosBitRate, DocsIetfQosRfMacIfDirection=DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=DocsIetfQosSchedulingType)\n\n# Objects\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosMIB=docsIetfQosMIB, docsIetfQosNotifications=docsIetfQosNotifications, docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=docsIetfQosPktClassTable, docsIetfQosPktClassEntry=docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId, docsIetfQosPktClassDirection=docsIetfQosPktClassDirection, docsIetfQosPktClassPriority=docsIetfQosPktClassPriority, docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow, docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh, docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask, docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol, docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType, docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr, docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask, docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr, docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask, docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart, docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd, docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart, docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd, docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr, docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask, docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr, docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType, docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol, docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow, docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh, docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId, docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive, docsIetfQosPktClassPkts=docsIetfQosPktClassPkts, 
docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap, docsIetfQosParamSetTable=docsIetfQosParamSetTable, docsIetfQosParamSetEntry=docsIetfQosParamSetEntry, docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName, docsIetfQosParamSetPriority=docsIetfQosParamSetPriority, docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate, docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst, docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate, docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt, docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout, docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout, docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst, docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType, docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval, docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter, docsIetfQosParamSetUnsolicitGrantSize=docsIetfQosParamSetUnsolicitGrantSize, docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval, docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter, docsIetfQosParamSetGrantsPerInterval=docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=docsIetfQosParamSetBitMap, docsIetfQosServiceFlowTable=docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=docsIetfQosServiceFlowPHSUnknowns, docsIetfQosServiceFlowPolicedDropPkts=docsIetfQosServiceFlowPolicedDropPkts, docsIetfQosServiceFlowPolicedDelayPkts=docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID, docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments, docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards, docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts, docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable, docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry, docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps, docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps, docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps, docsIetfQosDynamicAdds=docsIetfQosDynamicAdds, docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails, 
docsIetfQosDynamicChanges=docsIetfQosDynamicChanges, docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails, docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes, docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails, docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks, docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=docsIetfQosServiceFlowLogTimeDeleted, docsIetfQosServiceFlowLogTimeCreated=docsIetfQosServiceFlowLogTimeCreated, docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive, docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection, docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary, docsIetfQosServiceFlowLogServiceClassName=docsIetfQosServiceFlowLogServiceClassName, docsIetfQosServiceFlowLogPolicedDropPkts=docsIetfQosServiceFlowLogPolicedDropPkts, docsIetfQosServiceFlowLogPolicedDelayPkts=docsIetfQosServiceFlowLogPolicedDelayPkts, docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl, docsIetfQosServiceClassTable=docsIetfQosServiceClassTable, docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry, docsIetfQosServiceClassName=docsIetfQosServiceClassName, docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus, docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority, docsIetfQosServiceClassMaxTrafficRate=docsIetfQosServiceClassMaxTrafficRate, docsIetfQosServiceClassMaxTrafficBurst=docsIetfQosServiceClassMaxTrafficBurst, docsIetfQosServiceClassMinReservedRate=docsIetfQosServiceClassMinReservedRate, docsIetfQosServiceClassMinReservedPkt=docsIetfQosServiceClassMinReservedPkt, docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst)\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosServiceClassNomPollInterval=docsIetfQosServiceClassNomPollInterval, docsIetfQosServiceClassTolPollJitter=docsIetfQosServiceClassTolPollJitter, docsIetfQosServiceClassUnsolicitGrantSize=docsIetfQosServiceClassUnsolicitGrantSize, docsIetfQosServiceClassNomGrantInterval=docsIetfQosServiceClassNomGrantInterval, docsIetfQosServiceClassTolGrantJitter=docsIetfQosServiceClassTolGrantJitter, docsIetfQosServiceClassGrantsPerInterval=docsIetfQosServiceClassGrantsPerInterval, docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency, docsIetfQosServiceClassActiveTimeout=docsIetfQosServiceClassActiveTimeout, docsIetfQosServiceClassAdmittedTimeout=docsIetfQosServiceClassAdmittedTimeout, docsIetfQosServiceClassSchedulingType=docsIetfQosServiceClassSchedulingType, docsIetfQosServiceClassRequestPolicy=docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask=docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=docsIetfQosServiceClassStorageType, docsIetfQosServiceClassDSCPOverwrite=docsIetfQosServiceClassDSCPOverwrite, 
docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable, docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry, docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex, docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName, docsIetfQosServiceClassPolicyRulePriority=docsIetfQosServiceClassPolicyRulePriority, docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus, docsIetfQosServiceClassPolicyStorageType=docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry, docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize, docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=docsIetfQosCmtsIfIndex, docsIetfQosConformance=docsIetfQosConformance, docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=docsIetfQosCompliances)\n\n# Groups\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosBaseGroup=docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup, docsIetfQosCmtsGroup=docsIetfQosCmtsGroup, docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup, docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)\n\n# Compliances\nmibBuilder.exportSymbols(\"DOCS-IETF-QOS-MIB\", docsIetfQosCompliance=docsIetfQosCompliance)\n",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
# terminal based game in Python
from random import randint
print('Terminal based number guessing game')
while True:
try:
numberOfGames = int(input('Please choose how many games you want to play ---> '))
except:
        print('Only numbers accepted')
continue
if (numberOfGames > 0 and numberOfGames < 10):
        break
randomNumbers = []
for i in range(numberOfGames):
randomNumbers.append(randint(1, 10))
for index, number in enumerate(randomNumbers):
print('Game %i' %(index + 1))
guess = 0
attempts = 0
while (guess != number):
try:
guess = int(input('Guess the number ---> '))
except Exception as e:
print('Only numbers accepted')
continue
        if (guess > number):
            print('Your number is bigger!')
        elif (guess < number):
            print('Your number is smaller!')
attempts += 1
print('Great you guessed it! Attempts %i' %attempts)
attempts = 0
|
normal
|
{
"blob_id": "20c081dc47f541a988bccef89b8e51f446c80f58",
"index": 5471,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint('Terminal based number guessing game')\nwhile True:\n try:\n numberOfGames = int(input(\n 'Please choose how many games you want to play ---> '))\n except:\n print('Only numbes accepted')\n continue\n if numberOfGames > 0 and numberOfGames < 10:\n break\n<mask token>\nfor i in range(numberOfGames):\n randomNumbers.append(randint(1, 10))\nfor index, number in enumerate(randomNumbers):\n print('Game %i' % (index + 1))\n guess = 0\n attempts = 0\n while guess != number:\n try:\n guess = int(input('Guess the number ---> '))\n except Exception as e:\n print('Only numbers accepted')\n continue\n if guess > number:\n print('Your number is bigger!')\n else:\n print('Your number is smaller!')\n attempts += 1\n print('Great you guessed it! Attempts %i' % attempts)\n attempts = 0\n",
"step-3": "<mask token>\nprint('Terminal based number guessing game')\nwhile True:\n try:\n numberOfGames = int(input(\n 'Please choose how many games you want to play ---> '))\n except:\n print('Only numbes accepted')\n continue\n if numberOfGames > 0 and numberOfGames < 10:\n break\nrandomNumbers = []\nfor i in range(numberOfGames):\n randomNumbers.append(randint(1, 10))\nfor index, number in enumerate(randomNumbers):\n print('Game %i' % (index + 1))\n guess = 0\n attempts = 0\n while guess != number:\n try:\n guess = int(input('Guess the number ---> '))\n except Exception as e:\n print('Only numbers accepted')\n continue\n if guess > number:\n print('Your number is bigger!')\n else:\n print('Your number is smaller!')\n attempts += 1\n print('Great you guessed it! Attempts %i' % attempts)\n attempts = 0\n",
"step-4": "from random import randint\nprint('Terminal based number guessing game')\nwhile True:\n try:\n numberOfGames = int(input(\n 'Please choose how many games you want to play ---> '))\n except:\n print('Only numbes accepted')\n continue\n if numberOfGames > 0 and numberOfGames < 10:\n break\nrandomNumbers = []\nfor i in range(numberOfGames):\n randomNumbers.append(randint(1, 10))\nfor index, number in enumerate(randomNumbers):\n print('Game %i' % (index + 1))\n guess = 0\n attempts = 0\n while guess != number:\n try:\n guess = int(input('Guess the number ---> '))\n except Exception as e:\n print('Only numbers accepted')\n continue\n if guess > number:\n print('Your number is bigger!')\n else:\n print('Your number is smaller!')\n attempts += 1\n print('Great you guessed it! Attempts %i' % attempts)\n attempts = 0\n",
"step-5": "# terminal based game in Python\nfrom random import randint\n\nprint('Terminal based number guessing game')\nwhile True:\n try:\n numberOfGames = int(input('Please choose how many games you want to play ---> '))\n except:\n print('Only numbes accepted')\n continue\n if (numberOfGames > 0 and numberOfGames < 10):\n break;\n\nrandomNumbers = []\n\nfor i in range(numberOfGames):\n randomNumbers.append(randint(1, 10))\n\nfor index, number in enumerate(randomNumbers):\n print('Game %i' %(index + 1))\n guess = 0\n attempts = 0\n while (guess != number):\n try:\n guess = int(input('Guess the number ---> '))\n except Exception as e:\n print('Only numbers accepted')\n continue\n if (guess > number):\n print('Your number is bigger!')\n else:\n print('Your number is smaller!')\n attempts += 1\n print('Great you guessed it! Attempts %i' %attempts)\n attempts = 0\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def create_all(engine):
ORMBase.metadata.create_all(engine)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ORMBase = declarative_base()
def create_all(engine):
ORMBase.metadata.create_all(engine)
<|reserved_special_token_1|>
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, ForeignKey, Float
from sqlalchemy.orm import relationship, backref
ORMBase = declarative_base()
def create_all(engine):
ORMBase.metadata.create_all(engine)
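# A minimal sketch (not from the original snippet): one model declared on
# ORMBase so that create_all(engine) has a table to emit. The class name,
# table name, and columns are illustrative assumptions.
class ExampleItem(ORMBase):
    __tablename__ = 'example_item'

    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    price = Column(Float)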
|
flexible
|
{
"blob_id": "c7ca8235864ce5de188c4aa2feb9ad82d4fa9b0f",
"index": 4023,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_all(engine):\n ORMBase.metadata.create_all(engine)\n",
"step-3": "<mask token>\nORMBase = declarative_base()\n\n\ndef create_all(engine):\n ORMBase.metadata.create_all(engine)\n",
"step-4": "from sqlalchemy.ext.declarative import declarative_base\nfrom sqlalchemy import Column, Integer, String, ForeignKey, Float\nfrom sqlalchemy.orm import relationship, backref\nORMBase = declarative_base()\n\n\ndef create_all(engine):\n ORMBase.metadata.create_all(engine)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TodoViewset(viewsets.ModelViewSet):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class TodoViewset(viewsets.ModelViewSet):
queryset = models.Todo.objects.all()
serializer_class = serializers.TodoSerializer
<|reserved_special_token_1|>
from django.shortcuts import render
from rest_framework import status, viewsets, response
from . import models
from . import serializers
class TodoViewset(viewsets.ModelViewSet):
queryset = models.Todo.objects.all()
serializer_class = serializers.TodoSerializer
<|reserved_special_token_1|>
from django.shortcuts import render
from rest_framework import status, viewsets , response
from . import models
from . import serializers
# Create your views here.
class TodoViewset(viewsets.ModelViewSet):
queryset = models.Todo.objects.all()
serializer_class = serializers.TodoSerializer
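# A minimal sketch (not in the original snippet) of exposing the viewset via
# DRF's DefaultRouter; the 'todos' prefix is an illustrative assumption and
# this wiring normally lives in a urls.py rather than here.
from rest_framework.routers import DefaultRouter

router = DefaultRouter()
router.register(r'todos', TodoViewset, basename='todo')
urlpatterns = router.urls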
|
flexible
|
{
"blob_id": "1c668cf6f145b85a09b248fefda46e928de64e41",
"index": 5041,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass TodoViewset(viewsets.ModelViewSet):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass TodoViewset(viewsets.ModelViewSet):\n queryset = models.Todo.objects.all()\n serializer_class = serializers.TodoSerializer\n",
"step-4": "from django.shortcuts import render\nfrom rest_framework import status, viewsets, response\nfrom . import models\nfrom . import serializers\n\n\nclass TodoViewset(viewsets.ModelViewSet):\n queryset = models.Todo.objects.all()\n serializer_class = serializers.TodoSerializer\n",
"step-5": "from django.shortcuts import render\nfrom rest_framework import status, viewsets , response\n\nfrom . import models\nfrom . import serializers\n\n# Create your views here.\n\nclass TodoViewset(viewsets.ModelViewSet):\n queryset = models.Todo.objects.all()\n serializer_class = serializers.TodoSerializer\n \n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
print('hello world123')
|
flexible
|
{
"blob_id": "004a02f7ff49cb1b63ebedfcfcb4937377859099",
"index": 1187,
"step-1": "<mask token>\n",
"step-2": "print('hello world123')\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
from django.urls import path
from .views import (
TreeCreateView,
TreeListView,
TreeUpdateView,
)
app_name = 'trees'
urlpatterns = [
path('list/', TreeListView.as_view(),
name='list'),
path('create/', TreeCreateView.as_view(),
name='create'),
    path('<int:pk>/update/', TreeUpdateView.as_view(),
name='update'),
]
|
normal
|
{
"blob_id": "0c1de2c1eb5a4de7aeb14ad6b27aa61e07bc4c51",
"index": 602,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'trees'\nurlpatterns = [path('list/', TreeListView.as_view(), name='list'), path(\n 'create/', TreeCreateView.as_view(), name='create'), path(\n '<int:pk>/update/', TreeCreateView.as_view(), name='update')]\n",
"step-3": "from django.urls import path\nfrom .views import TreeCreateView, TreeListView, TreeUpdateView\napp_name = 'trees'\nurlpatterns = [path('list/', TreeListView.as_view(), name='list'), path(\n 'create/', TreeCreateView.as_view(), name='create'), path(\n '<int:pk>/update/', TreeCreateView.as_view(), name='update')]\n",
"step-4": "from django.urls import path\nfrom .views import (\n TreeCreateView,\n TreeListView,\n TreeUpdateView,\n)\n\n\napp_name = 'trees'\n\nurlpatterns = [\n path('list/', TreeListView.as_view(),\n name='list'),\n path('create/', TreeCreateView.as_view(),\n name='create'),\n path('<int:pk>/update/', TreeCreateView.as_view(),\n name='update'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import numpy as np
import pandas as pd
import logging
import matplotlib.pyplot as plt
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler, RobustScaler
from sklearn.compose import ColumnTransformer
from sklearn.pipeline import Pipeline, make_pipeline
from sklearn.linear_model import LinearRegression
import datetime
from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor
from sklearn.model_selection import KFold, cross_val_score, train_test_split
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
# ignore warnings
import warnings
warnings.filterwarnings(action="ignore")
df_train = pd.read_csv('./Scripts/pages/train.csv')
df_store = pd.read_csv('./Scripts/pages/store.csv')
df_test = pd.read_csv('./Scripts/pages/test.csv')
merged_train = pd.merge(left = df_train, right = df_store, how = 'inner', left_on = 'Store', right_on = 'Store')
merged_test = pd.merge(left = df_test, right = df_store, how = 'inner', left_on = 'Store', right_on = 'Store')
def preprocess_data(train, test):
# '''preprocessing'''
global train_features, test_features, train_target, categorical, numerical
# train and target features
train_features = train.drop(['Sales', 'Customers'], axis = 1) #drop the target feature + customers (~ will not be used for prediction)
train_target = train[['Sales']]
test_features = test.drop(['Id'], axis = 1) #drop id, it's required only during submission
#feature generation + transformations
try:
train_features['Date'] = pd.to_datetime(train_features.Date)
train_features['Month'] = train_features.Date.dt.month.to_list()
train_features['Year'] = train_features.Date.dt.year.to_list()
train_features['Day'] = train_features.Date.dt.day.to_list()
train_features['WeekOfYear'] = train_features.Date.dt.weekofyear.to_list()
train_features['DayOfWeek'] = train_features.Date.dt.dayofweek.to_list()
train_features['weekday'] = 1 # Initialize the column with default value of 1
train_features.loc[train_features['DayOfWeek'] == 5, 'weekday'] = 0
train_features.loc[train_features['DayOfWeek'] == 6, 'weekday'] = 0
train_features = train_features.drop(['Store'], axis = 1)
test_features['Date'] = pd.to_datetime(test_features.Date)
test_features['Month'] = test_features.Date.dt.month.to_list()
test_features['Year'] = test_features.Date.dt.year.to_list()
test_features['Day'] = test_features.Date.dt.day.to_list()
test_features['WeekOfYear'] = test_features.Date.dt.weekofyear.to_list()
test_features['DayOfWeek'] = test_features.Date.dt.dayofweek.to_list()
test_features['weekday'] = 1 # Initialize the column with default value of 1
test_features.loc[test_features['DayOfWeek'] == 5, 'weekday'] = 0
test_features.loc[test_features['DayOfWeek'] == 6, 'weekday'] = 0
test_features = test_features.drop(['Store'], axis = 1)
except KeyError:
print("Column couldn't be found")
# numerical and categorical columns (train set)
categorical = []
numerical = []
timestamp = []
for col in train_features.columns:
if train_features[col].dtype == object:
categorical.append(col)
elif train_features[col].dtype in ['int16', 'int32', 'int64', 'float16', 'float32', 'float64']:
numerical.append(col)
else:
timestamp.append(col)
# Keep selected columns only
my_cols = categorical + numerical + timestamp
train_features = train_features[my_cols].copy()
test_features = test_features[my_cols].copy()
features = pd.concat([train_features, test_features]) #merge the features columns for uniform preprocessing
# change dtypes for uniformity in preprocessing
features.CompetitionOpenSinceMonth = features.CompetitionOpenSinceMonth.astype('Int64')
features.CompetitionOpenSinceYear = features.CompetitionOpenSinceYear.astype('Int64')
features.Promo2SinceWeek = features.Promo2SinceWeek.astype('Int64')
features.Promo2SinceYear = features.Promo2SinceYear.astype('Int64')
features["StateHoliday"].loc[features["StateHoliday"] == 0] = "0"
# ''' actual preprocessing: the mighty pipeline '''
# numeric
for col in ['CompetitionDistance', 'CompetitionOpenSinceMonth', 'CompetitionOpenSinceYear', 'Promo2SinceWeek', 'Promo2SinceYear']:
features[col] = features[col].fillna((int(features[col].mean())))
features.PromoInterval = features.PromoInterval.fillna(features.PromoInterval.mode()[0])
features.Open = features.Open.fillna(features.Open.mode()[0])
features = pd.get_dummies(features, columns=['StoreType', 'Assortment', 'PromoInterval', 'StateHoliday'])
scaler = RobustScaler()
    # note: 'c' below is assembled but never used; the RobustScaler is applied to all 'numerical' columns instead
    c = ['DayOfWeek', 'Open', 'Promo', 'SchoolHoliday', 'CompetitionDistance', 'CompetitionOpenSinceMonth', 'CompetitionOpenSinceYear',
'Promo2', 'Promo2SinceWeek', 'Promo2SinceYear', 'WeekOfYear', 'Month', 'Year', 'Day', 'WeekOfYear', 'weekday']
features[numerical] = scaler.fit_transform(features[numerical].values)
return features
features = preprocess_data(merged_train, merged_test)
features = features.drop(['Date'], axis = 1)
# reconstruct train and test sets
def reconstruct_sets(features):
global x_train, x_val, y_train, y_val
# global train_set
# original train and test sets
x_train = features.iloc[:len(train_features), :]
x_test = features.iloc[len(train_features):, :]
y_train = train_target
# train_set = pd.concat([x_train, y_train], axis=1)
# updated train and validation sets
x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size = .20, random_state = 0)
return x_train, x_val, y_train, y_val, x_test
x_train, x_val, y_train, y_val, x_test = reconstruct_sets(features)
clf=RandomForestRegressor(n_estimators=14)
clf.fit(x_train,y_train)
y_pred = clf.predict(x_val)
print("MSE =", mean_squared_error(y_val, y_pred))
print("Mean R2 score =", r2_score(y_val, y_pred))
print("MAE =", mean_absolute_error(y_val, y_pred))
|
normal
|
{
"blob_id": "dc51ca86a49dbec6f714753782494f21d4b1591d",
"index": 9091,
"step-1": "<mask token>\n\n\ndef preprocess_data(train, test):\n global train_features, test_features, train_target, categorical, numerical\n train_features = train.drop(['Sales', 'Customers'], axis=1)\n train_target = train[['Sales']]\n test_features = test.drop(['Id'], axis=1)\n try:\n train_features['Date'] = pd.to_datetime(train_features.Date)\n train_features['Month'] = train_features.Date.dt.month.to_list()\n train_features['Year'] = train_features.Date.dt.year.to_list()\n train_features['Day'] = train_features.Date.dt.day.to_list()\n train_features['WeekOfYear'\n ] = train_features.Date.dt.weekofyear.to_list()\n train_features['DayOfWeek'] = train_features.Date.dt.dayofweek.to_list(\n )\n train_features['weekday'] = 1\n train_features.loc[train_features['DayOfWeek'] == 5, 'weekday'] = 0\n train_features.loc[train_features['DayOfWeek'] == 6, 'weekday'] = 0\n train_features = train_features.drop(['Store'], axis=1)\n test_features['Date'] = pd.to_datetime(test_features.Date)\n test_features['Month'] = test_features.Date.dt.month.to_list()\n test_features['Year'] = test_features.Date.dt.year.to_list()\n test_features['Day'] = test_features.Date.dt.day.to_list()\n test_features['WeekOfYear'] = test_features.Date.dt.weekofyear.to_list(\n )\n test_features['DayOfWeek'] = test_features.Date.dt.dayofweek.to_list()\n test_features['weekday'] = 1\n test_features.loc[test_features['DayOfWeek'] == 5, 'weekday'] = 0\n test_features.loc[test_features['DayOfWeek'] == 6, 'weekday'] = 0\n test_features = test_features.drop(['Store'], axis=1)\n except KeyError:\n print(\"Column couldn't be found\")\n categorical = []\n numerical = []\n timestamp = []\n for col in train_features.columns:\n if train_features[col].dtype == object:\n categorical.append(col)\n elif train_features[col].dtype in ['int16', 'int32', 'int64',\n 'float16', 'float32', 'float64']:\n numerical.append(col)\n else:\n timestamp.append(col)\n my_cols = categorical + numerical + timestamp\n train_features = train_features[my_cols].copy()\n test_features = test_features[my_cols].copy()\n features = pd.concat([train_features, test_features])\n features.CompetitionOpenSinceMonth = (features.\n CompetitionOpenSinceMonth.astype('Int64'))\n features.CompetitionOpenSinceYear = (features.CompetitionOpenSinceYear.\n astype('Int64'))\n features.Promo2SinceWeek = features.Promo2SinceWeek.astype('Int64')\n features.Promo2SinceYear = features.Promo2SinceYear.astype('Int64')\n features['StateHoliday'].loc[features['StateHoliday'] == 0] = '0'\n for col in ['CompetitionDistance', 'CompetitionOpenSinceMonth',\n 'CompetitionOpenSinceYear', 'Promo2SinceWeek', 'Promo2SinceYear']:\n features[col] = features[col].fillna(int(features[col].mean()))\n features.PromoInterval = features.PromoInterval.fillna(features.\n PromoInterval.mode()[0])\n features.Open = features.Open.fillna(features.Open.mode()[0])\n features = pd.get_dummies(features, columns=['StoreType', 'Assortment',\n 'PromoInterval', 'StateHoliday'])\n scaler = RobustScaler()\n c = ['DayOfWeek', 'Open', 'Promo', 'SchoolHoliday',\n 'CompetitionDistance', 'CompetitionOpenSinceMonth',\n 'CompetitionOpenSinceYear', 'Promo2', 'Promo2SinceWeek',\n 'Promo2SinceYear', 'WeekOfYear', 'Month', 'Year', 'Day',\n 'WeekOfYear', 'weekday']\n features[numerical] = scaler.fit_transform(features[numerical].values)\n return features\n\n\n<mask token>\n\n\ndef reconstruct_sets(features):\n global x_train, x_val, y_train, y_val\n x_train = features.iloc[:len(train_features), :]\n x_test = 
features.iloc[len(train_features):, :]\n y_train = train_target\n x_train, x_val, y_train, y_val = train_test_split(x_train, y_train,\n test_size=0.2, random_state=0)\n return x_train, x_val, y_train, y_val, x_test\n\n\n<mask token>\n",
"step-2": "<mask token>\nwarnings.filterwarnings(action='ignore')\n<mask token>\n\n\ndef preprocess_data(train, test):\n global train_features, test_features, train_target, categorical, numerical\n train_features = train.drop(['Sales', 'Customers'], axis=1)\n train_target = train[['Sales']]\n test_features = test.drop(['Id'], axis=1)\n try:\n train_features['Date'] = pd.to_datetime(train_features.Date)\n train_features['Month'] = train_features.Date.dt.month.to_list()\n train_features['Year'] = train_features.Date.dt.year.to_list()\n train_features['Day'] = train_features.Date.dt.day.to_list()\n train_features['WeekOfYear'\n ] = train_features.Date.dt.weekofyear.to_list()\n train_features['DayOfWeek'] = train_features.Date.dt.dayofweek.to_list(\n )\n train_features['weekday'] = 1\n train_features.loc[train_features['DayOfWeek'] == 5, 'weekday'] = 0\n train_features.loc[train_features['DayOfWeek'] == 6, 'weekday'] = 0\n train_features = train_features.drop(['Store'], axis=1)\n test_features['Date'] = pd.to_datetime(test_features.Date)\n test_features['Month'] = test_features.Date.dt.month.to_list()\n test_features['Year'] = test_features.Date.dt.year.to_list()\n test_features['Day'] = test_features.Date.dt.day.to_list()\n test_features['WeekOfYear'] = test_features.Date.dt.weekofyear.to_list(\n )\n test_features['DayOfWeek'] = test_features.Date.dt.dayofweek.to_list()\n test_features['weekday'] = 1\n test_features.loc[test_features['DayOfWeek'] == 5, 'weekday'] = 0\n test_features.loc[test_features['DayOfWeek'] == 6, 'weekday'] = 0\n test_features = test_features.drop(['Store'], axis=1)\n except KeyError:\n print(\"Column couldn't be found\")\n categorical = []\n numerical = []\n timestamp = []\n for col in train_features.columns:\n if train_features[col].dtype == object:\n categorical.append(col)\n elif train_features[col].dtype in ['int16', 'int32', 'int64',\n 'float16', 'float32', 'float64']:\n numerical.append(col)\n else:\n timestamp.append(col)\n my_cols = categorical + numerical + timestamp\n train_features = train_features[my_cols].copy()\n test_features = test_features[my_cols].copy()\n features = pd.concat([train_features, test_features])\n features.CompetitionOpenSinceMonth = (features.\n CompetitionOpenSinceMonth.astype('Int64'))\n features.CompetitionOpenSinceYear = (features.CompetitionOpenSinceYear.\n astype('Int64'))\n features.Promo2SinceWeek = features.Promo2SinceWeek.astype('Int64')\n features.Promo2SinceYear = features.Promo2SinceYear.astype('Int64')\n features['StateHoliday'].loc[features['StateHoliday'] == 0] = '0'\n for col in ['CompetitionDistance', 'CompetitionOpenSinceMonth',\n 'CompetitionOpenSinceYear', 'Promo2SinceWeek', 'Promo2SinceYear']:\n features[col] = features[col].fillna(int(features[col].mean()))\n features.PromoInterval = features.PromoInterval.fillna(features.\n PromoInterval.mode()[0])\n features.Open = features.Open.fillna(features.Open.mode()[0])\n features = pd.get_dummies(features, columns=['StoreType', 'Assortment',\n 'PromoInterval', 'StateHoliday'])\n scaler = RobustScaler()\n c = ['DayOfWeek', 'Open', 'Promo', 'SchoolHoliday',\n 'CompetitionDistance', 'CompetitionOpenSinceMonth',\n 'CompetitionOpenSinceYear', 'Promo2', 'Promo2SinceWeek',\n 'Promo2SinceYear', 'WeekOfYear', 'Month', 'Year', 'Day',\n 'WeekOfYear', 'weekday']\n features[numerical] = scaler.fit_transform(features[numerical].values)\n return features\n\n\n<mask token>\n\n\ndef reconstruct_sets(features):\n global x_train, x_val, y_train, y_val\n x_train = 
features.iloc[:len(train_features), :]\n x_test = features.iloc[len(train_features):, :]\n y_train = train_target\n x_train, x_val, y_train, y_val = train_test_split(x_train, y_train,\n test_size=0.2, random_state=0)\n return x_train, x_val, y_train, y_val, x_test\n\n\n<mask token>\nclf.fit(x_train, y_train)\n<mask token>\nprint('MSE =', mean_squared_error(y_val, y_pred))\nprint('Mean R2 score =', r2_score(y_val, y_pred))\nprint('MAE =', mean_absolute_error(y_val, y_pred))\n",
"step-3": "<mask token>\nwarnings.filterwarnings(action='ignore')\ndf_train = pd.read_csv('./Scripts/pages/train.csv')\ndf_store = pd.read_csv('./Scripts/pages/store.csv')\ndf_test = pd.read_csv('./Scripts/pages/test.csv')\nmerged_train = pd.merge(left=df_train, right=df_store, how='inner', left_on\n ='Store', right_on='Store')\nmerged_test = pd.merge(left=df_test, right=df_store, how='inner', left_on=\n 'Store', right_on='Store')\n\n\ndef preprocess_data(train, test):\n global train_features, test_features, train_target, categorical, numerical\n train_features = train.drop(['Sales', 'Customers'], axis=1)\n train_target = train[['Sales']]\n test_features = test.drop(['Id'], axis=1)\n try:\n train_features['Date'] = pd.to_datetime(train_features.Date)\n train_features['Month'] = train_features.Date.dt.month.to_list()\n train_features['Year'] = train_features.Date.dt.year.to_list()\n train_features['Day'] = train_features.Date.dt.day.to_list()\n train_features['WeekOfYear'\n ] = train_features.Date.dt.weekofyear.to_list()\n train_features['DayOfWeek'] = train_features.Date.dt.dayofweek.to_list(\n )\n train_features['weekday'] = 1\n train_features.loc[train_features['DayOfWeek'] == 5, 'weekday'] = 0\n train_features.loc[train_features['DayOfWeek'] == 6, 'weekday'] = 0\n train_features = train_features.drop(['Store'], axis=1)\n test_features['Date'] = pd.to_datetime(test_features.Date)\n test_features['Month'] = test_features.Date.dt.month.to_list()\n test_features['Year'] = test_features.Date.dt.year.to_list()\n test_features['Day'] = test_features.Date.dt.day.to_list()\n test_features['WeekOfYear'] = test_features.Date.dt.weekofyear.to_list(\n )\n test_features['DayOfWeek'] = test_features.Date.dt.dayofweek.to_list()\n test_features['weekday'] = 1\n test_features.loc[test_features['DayOfWeek'] == 5, 'weekday'] = 0\n test_features.loc[test_features['DayOfWeek'] == 6, 'weekday'] = 0\n test_features = test_features.drop(['Store'], axis=1)\n except KeyError:\n print(\"Column couldn't be found\")\n categorical = []\n numerical = []\n timestamp = []\n for col in train_features.columns:\n if train_features[col].dtype == object:\n categorical.append(col)\n elif train_features[col].dtype in ['int16', 'int32', 'int64',\n 'float16', 'float32', 'float64']:\n numerical.append(col)\n else:\n timestamp.append(col)\n my_cols = categorical + numerical + timestamp\n train_features = train_features[my_cols].copy()\n test_features = test_features[my_cols].copy()\n features = pd.concat([train_features, test_features])\n features.CompetitionOpenSinceMonth = (features.\n CompetitionOpenSinceMonth.astype('Int64'))\n features.CompetitionOpenSinceYear = (features.CompetitionOpenSinceYear.\n astype('Int64'))\n features.Promo2SinceWeek = features.Promo2SinceWeek.astype('Int64')\n features.Promo2SinceYear = features.Promo2SinceYear.astype('Int64')\n features['StateHoliday'].loc[features['StateHoliday'] == 0] = '0'\n for col in ['CompetitionDistance', 'CompetitionOpenSinceMonth',\n 'CompetitionOpenSinceYear', 'Promo2SinceWeek', 'Promo2SinceYear']:\n features[col] = features[col].fillna(int(features[col].mean()))\n features.PromoInterval = features.PromoInterval.fillna(features.\n PromoInterval.mode()[0])\n features.Open = features.Open.fillna(features.Open.mode()[0])\n features = pd.get_dummies(features, columns=['StoreType', 'Assortment',\n 'PromoInterval', 'StateHoliday'])\n scaler = RobustScaler()\n c = ['DayOfWeek', 'Open', 'Promo', 'SchoolHoliday',\n 'CompetitionDistance', 'CompetitionOpenSinceMonth',\n 
'CompetitionOpenSinceYear', 'Promo2', 'Promo2SinceWeek',\n 'Promo2SinceYear', 'WeekOfYear', 'Month', 'Year', 'Day',\n 'WeekOfYear', 'weekday']\n features[numerical] = scaler.fit_transform(features[numerical].values)\n return features\n\n\nfeatures = preprocess_data(merged_train, merged_test)\nfeatures = features.drop(['Date'], axis=1)\n\n\ndef reconstruct_sets(features):\n global x_train, x_val, y_train, y_val\n x_train = features.iloc[:len(train_features), :]\n x_test = features.iloc[len(train_features):, :]\n y_train = train_target\n x_train, x_val, y_train, y_val = train_test_split(x_train, y_train,\n test_size=0.2, random_state=0)\n return x_train, x_val, y_train, y_val, x_test\n\n\nx_train, x_val, y_train, y_val, x_test = reconstruct_sets(features)\nclf = RandomForestRegressor(n_estimators=14)\nclf.fit(x_train, y_train)\ny_pred = clf.predict(x_val)\nprint('MSE =', mean_squared_error(y_val, y_pred))\nprint('Mean R2 score =', r2_score(y_val, y_pred))\nprint('MAE =', mean_absolute_error(y_val, y_pred))\n",
"step-4": "import numpy as np\nimport pandas as pd\nimport logging\nimport matplotlib.pyplot as plt\nfrom sklearn.impute import SimpleImputer\nfrom sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler, RobustScaler\nfrom sklearn.compose import ColumnTransformer\nfrom sklearn.pipeline import Pipeline, make_pipeline\nfrom sklearn.linear_model import LinearRegression\nimport datetime\nfrom sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\nfrom sklearn.model_selection import KFold, cross_val_score, train_test_split\nfrom sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score\nimport warnings\nwarnings.filterwarnings(action='ignore')\ndf_train = pd.read_csv('./Scripts/pages/train.csv')\ndf_store = pd.read_csv('./Scripts/pages/store.csv')\ndf_test = pd.read_csv('./Scripts/pages/test.csv')\nmerged_train = pd.merge(left=df_train, right=df_store, how='inner', left_on\n ='Store', right_on='Store')\nmerged_test = pd.merge(left=df_test, right=df_store, how='inner', left_on=\n 'Store', right_on='Store')\n\n\ndef preprocess_data(train, test):\n global train_features, test_features, train_target, categorical, numerical\n train_features = train.drop(['Sales', 'Customers'], axis=1)\n train_target = train[['Sales']]\n test_features = test.drop(['Id'], axis=1)\n try:\n train_features['Date'] = pd.to_datetime(train_features.Date)\n train_features['Month'] = train_features.Date.dt.month.to_list()\n train_features['Year'] = train_features.Date.dt.year.to_list()\n train_features['Day'] = train_features.Date.dt.day.to_list()\n train_features['WeekOfYear'\n ] = train_features.Date.dt.weekofyear.to_list()\n train_features['DayOfWeek'] = train_features.Date.dt.dayofweek.to_list(\n )\n train_features['weekday'] = 1\n train_features.loc[train_features['DayOfWeek'] == 5, 'weekday'] = 0\n train_features.loc[train_features['DayOfWeek'] == 6, 'weekday'] = 0\n train_features = train_features.drop(['Store'], axis=1)\n test_features['Date'] = pd.to_datetime(test_features.Date)\n test_features['Month'] = test_features.Date.dt.month.to_list()\n test_features['Year'] = test_features.Date.dt.year.to_list()\n test_features['Day'] = test_features.Date.dt.day.to_list()\n test_features['WeekOfYear'] = test_features.Date.dt.weekofyear.to_list(\n )\n test_features['DayOfWeek'] = test_features.Date.dt.dayofweek.to_list()\n test_features['weekday'] = 1\n test_features.loc[test_features['DayOfWeek'] == 5, 'weekday'] = 0\n test_features.loc[test_features['DayOfWeek'] == 6, 'weekday'] = 0\n test_features = test_features.drop(['Store'], axis=1)\n except KeyError:\n print(\"Column couldn't be found\")\n categorical = []\n numerical = []\n timestamp = []\n for col in train_features.columns:\n if train_features[col].dtype == object:\n categorical.append(col)\n elif train_features[col].dtype in ['int16', 'int32', 'int64',\n 'float16', 'float32', 'float64']:\n numerical.append(col)\n else:\n timestamp.append(col)\n my_cols = categorical + numerical + timestamp\n train_features = train_features[my_cols].copy()\n test_features = test_features[my_cols].copy()\n features = pd.concat([train_features, test_features])\n features.CompetitionOpenSinceMonth = (features.\n CompetitionOpenSinceMonth.astype('Int64'))\n features.CompetitionOpenSinceYear = (features.CompetitionOpenSinceYear.\n astype('Int64'))\n features.Promo2SinceWeek = features.Promo2SinceWeek.astype('Int64')\n features.Promo2SinceYear = features.Promo2SinceYear.astype('Int64')\n 
features['StateHoliday'].loc[features['StateHoliday'] == 0] = '0'\n for col in ['CompetitionDistance', 'CompetitionOpenSinceMonth',\n 'CompetitionOpenSinceYear', 'Promo2SinceWeek', 'Promo2SinceYear']:\n features[col] = features[col].fillna(int(features[col].mean()))\n features.PromoInterval = features.PromoInterval.fillna(features.\n PromoInterval.mode()[0])\n features.Open = features.Open.fillna(features.Open.mode()[0])\n features = pd.get_dummies(features, columns=['StoreType', 'Assortment',\n 'PromoInterval', 'StateHoliday'])\n scaler = RobustScaler()\n c = ['DayOfWeek', 'Open', 'Promo', 'SchoolHoliday',\n 'CompetitionDistance', 'CompetitionOpenSinceMonth',\n 'CompetitionOpenSinceYear', 'Promo2', 'Promo2SinceWeek',\n 'Promo2SinceYear', 'WeekOfYear', 'Month', 'Year', 'Day',\n 'WeekOfYear', 'weekday']\n features[numerical] = scaler.fit_transform(features[numerical].values)\n return features\n\n\nfeatures = preprocess_data(merged_train, merged_test)\nfeatures = features.drop(['Date'], axis=1)\n\n\ndef reconstruct_sets(features):\n global x_train, x_val, y_train, y_val\n x_train = features.iloc[:len(train_features), :]\n x_test = features.iloc[len(train_features):, :]\n y_train = train_target\n x_train, x_val, y_train, y_val = train_test_split(x_train, y_train,\n test_size=0.2, random_state=0)\n return x_train, x_val, y_train, y_val, x_test\n\n\nx_train, x_val, y_train, y_val, x_test = reconstruct_sets(features)\nclf = RandomForestRegressor(n_estimators=14)\nclf.fit(x_train, y_train)\ny_pred = clf.predict(x_val)\nprint('MSE =', mean_squared_error(y_val, y_pred))\nprint('Mean R2 score =', r2_score(y_val, y_pred))\nprint('MAE =', mean_absolute_error(y_val, y_pred))\n",
"step-5": "import numpy as np\nimport pandas as pd \nimport logging\nimport matplotlib.pyplot as plt\nfrom sklearn.impute import SimpleImputer\nfrom sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler, RobustScaler\nfrom sklearn.compose import ColumnTransformer\nfrom sklearn.pipeline import Pipeline, make_pipeline\nfrom sklearn.linear_model import LinearRegression\nimport datetime\nfrom sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\nfrom sklearn.model_selection import KFold, cross_val_score, train_test_split\nfrom sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score\n\n# ignore warnings\nimport warnings\nwarnings.filterwarnings(action=\"ignore\")\n\ndf_train = pd.read_csv('./Scripts/pages/train.csv')\ndf_store = pd.read_csv('./Scripts/pages/store.csv')\ndf_test = pd.read_csv('./Scripts/pages/test.csv')\n\nmerged_train = pd.merge(left = df_train, right = df_store, how = 'inner', left_on = 'Store', right_on = 'Store')\nmerged_test = pd.merge(left = df_test, right = df_store, how = 'inner', left_on = 'Store', right_on = 'Store')\n\n\ndef preprocess_data(train, test):\n \n # '''preprocessing'''\n global train_features, test_features, train_target, categorical, numerical\n\n # train and target features\n train_features = train.drop(['Sales', 'Customers'], axis = 1) #drop the target feature + customers (~ will not be used for prediction)\n train_target = train[['Sales']]\n test_features = test.drop(['Id'], axis = 1) #drop id, it's required only during submission\n\n #feature generation + transformations\n try:\n train_features['Date'] = pd.to_datetime(train_features.Date)\n train_features['Month'] = train_features.Date.dt.month.to_list()\n train_features['Year'] = train_features.Date.dt.year.to_list()\n train_features['Day'] = train_features.Date.dt.day.to_list()\n train_features['WeekOfYear'] = train_features.Date.dt.weekofyear.to_list()\n train_features['DayOfWeek'] = train_features.Date.dt.dayofweek.to_list()\n train_features['weekday'] = 1 # Initialize the column with default value of 1\n train_features.loc[train_features['DayOfWeek'] == 5, 'weekday'] = 0\n train_features.loc[train_features['DayOfWeek'] == 6, 'weekday'] = 0\n train_features = train_features.drop(['Store'], axis = 1)\n\n test_features['Date'] = pd.to_datetime(test_features.Date)\n test_features['Month'] = test_features.Date.dt.month.to_list()\n test_features['Year'] = test_features.Date.dt.year.to_list()\n test_features['Day'] = test_features.Date.dt.day.to_list()\n test_features['WeekOfYear'] = test_features.Date.dt.weekofyear.to_list()\n test_features['DayOfWeek'] = test_features.Date.dt.dayofweek.to_list()\n test_features['weekday'] = 1 # Initialize the column with default value of 1\n test_features.loc[test_features['DayOfWeek'] == 5, 'weekday'] = 0\n test_features.loc[test_features['DayOfWeek'] == 6, 'weekday'] = 0\n test_features = test_features.drop(['Store'], axis = 1)\n except KeyError:\n print(\"Column couldn't be found\")\n\n # numerical and categorical columns (train set)\n categorical = []\n numerical = []\n timestamp = []\n\n for col in train_features.columns:\n if train_features[col].dtype == object:\n categorical.append(col)\n elif train_features[col].dtype in ['int16', 'int32', 'int64', 'float16', 'float32', 'float64']:\n numerical.append(col)\n else:\n timestamp.append(col)\n\n # Keep selected columns only\n my_cols = categorical + numerical + timestamp\n train_features = train_features[my_cols].copy()\n test_features = 
test_features[my_cols].copy()\n features = pd.concat([train_features, test_features]) #merge the features columns for uniform preprocessing\n\n # change dtypes for uniformity in preprocessing\n features.CompetitionOpenSinceMonth = features.CompetitionOpenSinceMonth.astype('Int64') \n features.CompetitionOpenSinceYear = features.CompetitionOpenSinceYear.astype('Int64')\n features.Promo2SinceWeek = features.Promo2SinceWeek.astype('Int64') \n features.Promo2SinceYear = features.Promo2SinceYear.astype('Int64')\n features[\"StateHoliday\"].loc[features[\"StateHoliday\"] == 0] = \"0\"\n\n\n\n # ''' actual preprocessing: the mighty pipeline '''\n # numeric\n for col in ['CompetitionDistance', 'CompetitionOpenSinceMonth', 'CompetitionOpenSinceYear', 'Promo2SinceWeek', 'Promo2SinceYear']:\n features[col] = features[col].fillna((int(features[col].mean()))) \n features.PromoInterval = features.PromoInterval.fillna(features.PromoInterval.mode()[0])\n features.Open = features.Open.fillna(features.Open.mode()[0])\n features = pd.get_dummies(features, columns=['StoreType', 'Assortment', 'PromoInterval', 'StateHoliday'])\n \n scaler = RobustScaler()\n c = ['DayOfWeek', 'Open', 'Promo', 'SchoolHoliday', 'CompetitionDistance', 'CompetitionOpenSinceMonth', 'CompetitionOpenSinceYear',\n 'Promo2', 'Promo2SinceWeek', 'Promo2SinceYear', 'WeekOfYear', 'Month', 'Year', 'Day', 'WeekOfYear', 'weekday']\n features[numerical] = scaler.fit_transform(features[numerical].values)\n \n\n return features\n\n\n\nfeatures = preprocess_data(merged_train, merged_test)\nfeatures = features.drop(['Date'], axis = 1)\n\n\n# reconstruct train and test sets\ndef reconstruct_sets(features):\n global x_train, x_val, y_train, y_val\n # global train_set\n # original train and test sets\n x_train = features.iloc[:len(train_features), :]\n x_test = features.iloc[len(train_features):, :]\n y_train = train_target\n # train_set = pd.concat([x_train, y_train], axis=1)\n\n # updated train and validation sets\n x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size = .20, random_state = 0)\n\n\n return x_train, x_val, y_train, y_val, x_test\n\nx_train, x_val, y_train, y_val, x_test = reconstruct_sets(features)\n\nclf=RandomForestRegressor(n_estimators=14)\nclf.fit(x_train,y_train)\ny_pred = clf.predict(x_val)\nprint(\"MSE =\", mean_squared_error(y_val, y_pred))\nprint(\"Mean R2 score =\", r2_score(y_val, y_pred))\nprint(\"MAE =\", mean_absolute_error(y_val, y_pred))\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class ConsensusSimulation:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def run_sim(self, record_all=False, update_every=1.0):
"""run the core simulation"""
t = 0
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
self.X.append(self.x)
self.T.append(0)
start = time.time()
time_since_last_update = 0.0
progress = 1
while self.agreement() == False:
start_it = time.time()
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.x + self.dt * self.f(self.x, *self.f_arg)
if record_all:
self.X.append(self.x)
self.T.append(time.time() - start)
elif t - np.floor(t) < 0.01:
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time() - start_it
time_since_last_update += end
if time_since_last_update >= update_every:
sys.stdout.write('\r' +
'Iteration: {}, disagreement: {}, time: {}'.format(
progress, self.disagreement(), time.time() - start))
sys.stdout.flush()
time_since_last_update = 0.0
progress += 1
print('')
end = time.time()
return self.T[-1]
def sim_delay(self, delay=1, runtime=100):
t = 0
self.tau = delay
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
for i in range(0, delay + 1):
self.X.append(self.x)
self.T.append(0)
start = time.time()
while self.agreement() == False:
if self.T[-1] > runtime:
break
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.X[-1]
if len(self.X) - delay < 0:
pass
else:
index = len(self.X) - delay
self.x = self.X[-1] + self.dt * self.f(self.X[index], *self
.f_arg)
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time()
return self.T[-1]
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ConsensusSimulation:
<|reserved_special_token_0|>
def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,
x_init=None, convergence_warning=True, delay=0):
if isinstance(topology, nx.Graph):
self.graph = topology
self.size = len(self.graph)
else:
print('Argument Error: topology must be type', type(nx.Graph()))
if callable(dynamics):
self.f = dynamics
if len(dynamics_args) == 1:
self.f_arg = dynamics_args, 1
self.f_arg = dynamics_args
else:
print('Argument Error: dynamics must be a function')
self.dt = time_step
self.tau = delay
if not isinstance(x_init, type(np.ones(1))) and x_init == None:
self.x = np.linspace(1, self.size, self.size)
self.x = self.x.reshape(self.size, 1)
else:
self.x = x_init.copy().reshape(self.size, 1)
self.L = nx.laplacian_matrix(self.graph).todense()
self.X = list()
self.T = list()
self.warn = convergence_warning
self.d_max = max(np.array(self.graph.degree)[:, 1])
self.tau_max = np.pi / (4 * self.d_max)
def disagreement(self):
"""Returns the 'error'/inhomogeneity in the
decision vector"""
return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(
0)
def agreement(self, tol=1e-06):
"""Test for convergence"""
if self.disagreement() < tol:
return True
else:
return False
def run_sim(self, record_all=False, update_every=1.0):
"""run the core simulation"""
t = 0
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
self.X.append(self.x)
self.T.append(0)
start = time.time()
time_since_last_update = 0.0
progress = 1
while self.agreement() == False:
start_it = time.time()
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.x + self.dt * self.f(self.x, *self.f_arg)
if record_all:
self.X.append(self.x)
self.T.append(time.time() - start)
elif t - np.floor(t) < 0.01:
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time() - start_it
time_since_last_update += end
if time_since_last_update >= update_every:
sys.stdout.write('\r' +
'Iteration: {}, disagreement: {}, time: {}'.format(
progress, self.disagreement(), time.time() - start))
sys.stdout.flush()
time_since_last_update = 0.0
progress += 1
print('')
end = time.time()
return self.T[-1]
def sim_delay(self, delay=1, runtime=100):
t = 0
self.tau = delay
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
for i in range(0, delay + 1):
self.X.append(self.x)
self.T.append(0)
start = time.time()
while self.agreement() == False:
if self.T[-1] > runtime:
break
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.X[-1]
if len(self.X) - delay < 0:
pass
else:
index = len(self.X) - delay
self.x = self.X[-1] + self.dt * self.f(self.X[index], *self
.f_arg)
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time()
return self.T[-1]
<|reserved_special_token_0|>
def print_delay(self):
print('Delay in seconds')
return self.dt * self.tau
def delay_stable_max(self):
d = maximum_degree(self.graph)
return np.pi / (4 * d[1])
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ConsensusSimulation:
<|reserved_special_token_0|>
def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,
x_init=None, convergence_warning=True, delay=0):
if isinstance(topology, nx.Graph):
self.graph = topology
self.size = len(self.graph)
else:
print('Argument Error: topology must be type', type(nx.Graph()))
if callable(dynamics):
self.f = dynamics
if len(dynamics_args) == 1:
self.f_arg = dynamics_args, 1
self.f_arg = dynamics_args
else:
print('Argument Error: dynamics must be a function')
self.dt = time_step
self.tau = delay
if not isinstance(x_init, type(np.ones(1))) and x_init == None:
self.x = np.linspace(1, self.size, self.size)
self.x = self.x.reshape(self.size, 1)
else:
self.x = x_init.copy().reshape(self.size, 1)
self.L = nx.laplacian_matrix(self.graph).todense()
self.X = list()
self.T = list()
self.warn = convergence_warning
self.d_max = max(np.array(self.graph.degree)[:, 1])
self.tau_max = np.pi / (4 * self.d_max)
def disagreement(self):
"""Returns the 'error'/inhomogeneity in the
decision vector"""
return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(
0)
def agreement(self, tol=1e-06):
"""Test for convergence"""
if self.disagreement() < tol:
return True
else:
return False
def run_sim(self, record_all=False, update_every=1.0):
"""run the core simulation"""
t = 0
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
self.X.append(self.x)
self.T.append(0)
start = time.time()
time_since_last_update = 0.0
progress = 1
while self.agreement() == False:
start_it = time.time()
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.x + self.dt * self.f(self.x, *self.f_arg)
if record_all:
self.X.append(self.x)
self.T.append(time.time() - start)
elif t - np.floor(t) < 0.01:
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time() - start_it
time_since_last_update += end
if time_since_last_update >= update_every:
sys.stdout.write('\r' +
'Iteration: {}, disagreement: {}, time: {}'.format(
progress, self.disagreement(), time.time() - start))
sys.stdout.flush()
time_since_last_update = 0.0
progress += 1
print('')
end = time.time()
return self.T[-1]
def sim_delay(self, delay=1, runtime=100):
t = 0
self.tau = delay
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
for i in range(0, delay + 1):
self.X.append(self.x)
self.T.append(0)
start = time.time()
while self.agreement() == False:
if self.T[-1] > runtime:
break
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.X[-1]
if len(self.X) - delay < 0:
pass
else:
index = len(self.X) - delay
self.x = self.X[-1] + self.dt * self.f(self.X[index], *self
.f_arg)
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time()
return self.T[-1]
def plot(self, weight_average=False):
"""Show the convergence analysis"""
if len(self.X) == 0 or len(self.T) == 0:
print('Nothing to plot...')
x = np.array(self.X)
for i in range(0, x.shape[1]):
plt.plot(self.T, x[:, i, 0])
if weight_average:
w_i = np.zeros(self.size)
s = sum(np.array(self.graph.degree)[:, 1])
x = self.x_init
for i in nx.nodes(self.graph):
w_i[i] = self.graph.degree(i) / s
x[i] = x[i] * w_i[i]
plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + sum(x),
label='Connected graph consensus: ' + str(sum(x)), color=
'red', marker='s')
else:
plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + np.mean
(self.x_init), label='Connected graph consensus: ' + str(
round(np.mean(self.x_init), 3)), color='red', marker='s')
plt.grid()
plt.xlabel('Time (seconds)')
plt.ylabel('State')
plt.title('Convergence of consensus algorithm')
plt.legend()
def print_delay(self):
print('Delay in seconds')
return self.dt * self.tau
def delay_stable_max(self):
d = maximum_degree(self.graph)
return np.pi / (4 * d[1])
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
import time
import sys
class ConsensusSimulation:
"""Class to model a general consensus problem
see DOI: 10.1109/JPROC.2006.887293"""
def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,
x_init=None, convergence_warning=True, delay=0):
if isinstance(topology, nx.Graph):
self.graph = topology
self.size = len(self.graph)
else:
print('Argument Error: topology must be type', type(nx.Graph()))
if callable(dynamics):
self.f = dynamics
if len(dynamics_args) == 1:
self.f_arg = dynamics_args, 1
self.f_arg = dynamics_args
else:
print('Argument Error: dynamics must be a function')
self.dt = time_step
self.tau = delay
if not isinstance(x_init, type(np.ones(1))) and x_init == None:
self.x = np.linspace(1, self.size, self.size)
self.x = self.x.reshape(self.size, 1)
else:
self.x = x_init.copy().reshape(self.size, 1)
self.L = nx.laplacian_matrix(self.graph).todense()
self.X = list()
self.T = list()
self.warn = convergence_warning
self.d_max = max(np.array(self.graph.degree)[:, 1])
self.tau_max = np.pi / (4 * self.d_max)
def disagreement(self):
"""Returns the 'error'/inhomogeneity in the
decision vector"""
return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(
0)
def agreement(self, tol=1e-06):
"""Test for convergence"""
if self.disagreement() < tol:
return True
else:
return False
def run_sim(self, record_all=False, update_every=1.0):
"""run the core simulation"""
t = 0
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
self.X.append(self.x)
self.T.append(0)
start = time.time()
time_since_last_update = 0.0
progress = 1
while self.agreement() == False:
start_it = time.time()
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.x + self.dt * self.f(self.x, *self.f_arg)
if record_all:
self.X.append(self.x)
self.T.append(time.time() - start)
elif t - np.floor(t) < 0.01:
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time() - start_it
time_since_last_update += end
if time_since_last_update >= update_every:
sys.stdout.write('\r' +
'Iteration: {}, disagreement: {}, time: {}'.format(
progress, self.disagreement(), time.time() - start))
sys.stdout.flush()
time_since_last_update = 0.0
progress += 1
print('')
end = time.time()
return self.T[-1]
def sim_delay(self, delay=1, runtime=100):
t = 0
self.tau = delay
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
for i in range(0, delay + 1):
self.X.append(self.x)
self.T.append(0)
start = time.time()
while self.agreement() == False:
if self.T[-1] > runtime:
break
if t == 0 and self.warn and not nx.is_connected(self.graph):
print(
'Graph not connected, consensus algorithm will probably not converge!'
)
print('Simulating to 5 seconds...')
flag = True
if flag and time.time() - start > 5:
break
self.x = self.X[-1]
if len(self.X) - delay < 0:
pass
else:
index = len(self.X) - delay
self.x = self.X[-1] + self.dt * self.f(self.X[index], *self
.f_arg)
self.X.append(self.x)
self.T.append(time.time() - start)
t = t + self.dt
end = time.time()
return self.T[-1]
def plot(self, weight_average=False):
"""Show the convergence analysis"""
if len(self.X) == 0 or len(self.T) == 0:
print('Nothing to plot...')
x = np.array(self.X)
for i in range(0, x.shape[1]):
plt.plot(self.T, x[:, i, 0])
if weight_average:
w_i = np.zeros(self.size)
s = sum(np.array(self.graph.degree)[:, 1])
x = self.x_init
for i in nx.nodes(self.graph):
w_i[i] = self.graph.degree(i) / s
x[i] = x[i] * w_i[i]
plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + sum(x),
label='Connected graph consensus: ' + str(sum(x)), color=
'red', marker='s')
else:
plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + np.mean
(self.x_init), label='Connected graph consensus: ' + str(
round(np.mean(self.x_init), 3)), color='red', marker='s')
plt.grid()
plt.xlabel('Time (seconds)')
plt.ylabel('State')
plt.title('Convergence of consensus algorithm')
plt.legend()
def print_delay(self):
print('Delay in seconds')
return self.dt * self.tau
def delay_stable_max(self):
        d = max(self.graph.degree, key=lambda nd: nd[1])  # (node, degree) pair with the largest degree
return np.pi / (4 * d[1])
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
import time
import sys
class ConsensusSimulation:
"""Class to model a general consensus problem
see DOI: 10.1109/JPROC.2006.887293"""
def __init__(self,
topology,
dynamics,
dynamics_args,
time_step=0.01,
x_init=None,
convergence_warning=True,
delay=0):
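        """Set up a consensus problem on `topology`, with node dynamics given by dynamics(x, *dynamics_args)"""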
# check arguments are of the
# correct form
if(isinstance(topology,nx.Graph)):
self.graph = topology
self.size = len(self.graph)
else:
print("Argument Error: topology must be type"
, type(nx.Graph()))
if(callable(dynamics)):
self.f = dynamics
if(len(dynamics_args)==1):
self.f_arg = (dynamics_args,1)
self.f_arg = dynamics_args
else:
print("Argument Error: dynamics must be a function")
self.dt = time_step
self.tau = delay
# set up initial vector to
# 1,2,3,...,n
if(not isinstance(x_init, type(np.ones(1))) and x_init==None):
self.x = np.linspace(1,self.size,self.size)
self.x = self.x.reshape(self.size,1)
else:
self.x = x_init.copy().reshape(self.size,1)
# The Laplacian matrix, quite the building block
# for the algorithms
self.L = nx.laplacian_matrix(self.graph).todense()
self.X = list()
self.T = list()
        # a graph that is not connected won't converge
# maybe there's some algorithm that will
# though...
self.warn = convergence_warning
self.d_max = max(np.array(self.graph.degree)[:,1])
self.tau_max = (np.pi)/(4*self.d_max)
def disagreement(self):
"""Returns the 'error'/inhomogeneity in the
decision vector"""
return 0.5*(np.dot(np.dot(np.transpose(self.x),self.L),self.x)).item(0)
def agreement(self,tol=1e-6):
"""Test for convergence"""
if(self.disagreement()<tol):
return True
else:
return False
def run_sim(self,record_all=False,update_every=1.0):
"""run the core simulation"""
t=0
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
self.X.append(self.x)
self.T.append(0)
start = time.time()
time_since_last_update = 0.0
progress = 1
while self.agreement() == False:
start_it = time.time()
if(t==0 and self.warn and not nx.is_connected(self.graph)):
print("Graph not connected, consensus algorithm will probably not converge!")
print("Simulating to 5 seconds...")
flag = True
if(flag and time.time()-start>5):
break
# core simulation done here
# very simple discretisation...
self.x = self.x+self.dt*self.f(self.x,*self.f_arg)
# odd way to test for 1,2,3,etc
# when arg is float
if (record_all):
self.X.append(self.x)
self.T.append(time.time()-start)
else:
if (t-np.floor(t)<1e-2):
self.X.append(self.x)
self.T.append(time.time()-start)
t = t+self.dt
end = time.time()-start_it
time_since_last_update += end
if time_since_last_update >= update_every:
sys.stdout.write("\r" + "Iteration: {}, disagreement: {}, time: {}".format(progress,self.disagreement(),time.time()-start))
sys.stdout.flush()
time_since_last_update = 0.0
progress += 1
print("")
end = time.time()
return self.T[-1]
def sim_delay(self, delay = 1, runtime=100):
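        """Run the simulation using state information delayed by `delay` time steps; stops at agreement or after `runtime` seconds"""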
t=0
self.tau=delay
self.x_init = self.x
self.X = list()
self.T = list()
flag = False
for i in range(0,delay+1):
self.X.append(self.x)
self.T.append(0)
start = time.time()
while self.agreement() == False:
if (self.T[-1] > runtime):
break
if (t==0 and self.warn and not nx.is_connected(self.graph)):
print("Graph not connected, consensus algorithm will probably not converge!")
print("Simulating to 5 seconds...")
flag = True
if(flag and time.time()-start>5):
break
# core simulation done here
# very simple discretisation...
self.x = self.X[-1]
if (len(self.X)-delay<0):
pass
else:
index = len(self.X)-delay
self.x = self.X[-1]+self.dt*self.f(self.X[index],*self.f_arg)
# odd way to test for 1,2,3,etc
# when arg is float
self.X.append(self.x)
self.T.append(time.time()-start)
t = t+self.dt
end = time.time()
return self.T[-1]
def plot(self, weight_average=False):
"""Show the convergence analysis"""
if(len(self.X)==0 or len(self.T)==0):
print("Nothing to plot...")
x = np.array(self.X)
for i in range(0,x.shape[1]):
plt.plot(self.T,x[:,i,0])
if(weight_average):
w_i = np.zeros(self.size)
s = sum(np.array(self.graph.degree)[:,1])
x = self.x_init
for i in nx.nodes(self.graph):
w_i[i] = self.graph.degree(i)/s
x[i] = x[i]*w_i[i]
plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+sum(x), label="Connected graph consensus: "+str(sum(x)),color='red',marker='s')
else:
plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+np.mean(self.x_init), label="Connected graph consensus: "+str(round(np.mean(self.x_init),3)),color='red',marker='s')
plt.grid()
plt.xlabel("Time (seconds)")
plt.ylabel("State")
plt.title("Convergence of consensus algorithm")
plt.legend()
def print_delay(self):
print("Delay in seconds")
return self.dt*self.tau
def delay_stable_max(self):
        d = max(self.graph.degree, key=lambda nd: nd[1])  # (node, degree) pair with the largest degree
return (np.pi)/(4*d[1])
|
flexible
|
{
"blob_id": "3164eab8dc221149c9f865645edf9991d810d2ac",
"index": 8698,
"step-1": "<mask token>\n\n\nclass ConsensusSimulation:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time()\n return self.T[-1]\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ConsensusSimulation:\n <mask token>\n\n def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,\n x_init=None, convergence_warning=True, delay=0):\n if isinstance(topology, nx.Graph):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print('Argument Error: topology must be type', type(nx.Graph()))\n if callable(dynamics):\n self.f = dynamics\n if len(dynamics_args) == 1:\n self.f_arg = dynamics_args, 1\n self.f_arg = dynamics_args\n else:\n print('Argument Error: dynamics must be a function')\n self.dt = time_step\n self.tau = delay\n if not isinstance(x_init, type(np.ones(1))) and x_init == None:\n self.x = np.linspace(1, self.size, self.size)\n self.x = self.x.reshape(self.size, 1)\n else:\n self.x = x_init.copy().reshape(self.size, 1)\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n self.warn = convergence_warning\n self.d_max = max(np.array(self.graph.degree)[:, 1])\n self.tau_max = np.pi / (4 * self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(\n 0)\n\n def agreement(self, tol=1e-06):\n \"\"\"Test for convergence\"\"\"\n if self.disagreement() < tol:\n return True\n else:\n return False\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time()\n return self.T[-1]\n <mask token>\n\n def print_delay(self):\n print('Delay in seconds')\n return self.dt * self.tau\n\n def delay_stable_max(self):\n d = 
maximum_degree(self.graph)\n return np.pi / (4 * d[1])\n",
"step-3": "<mask token>\n\n\nclass ConsensusSimulation:\n <mask token>\n\n def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,\n x_init=None, convergence_warning=True, delay=0):\n if isinstance(topology, nx.Graph):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print('Argument Error: topology must be type', type(nx.Graph()))\n if callable(dynamics):\n self.f = dynamics\n if len(dynamics_args) == 1:\n self.f_arg = dynamics_args, 1\n self.f_arg = dynamics_args\n else:\n print('Argument Error: dynamics must be a function')\n self.dt = time_step\n self.tau = delay\n if not isinstance(x_init, type(np.ones(1))) and x_init == None:\n self.x = np.linspace(1, self.size, self.size)\n self.x = self.x.reshape(self.size, 1)\n else:\n self.x = x_init.copy().reshape(self.size, 1)\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n self.warn = convergence_warning\n self.d_max = max(np.array(self.graph.degree)[:, 1])\n self.tau_max = np.pi / (4 * self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(\n 0)\n\n def agreement(self, tol=1e-06):\n \"\"\"Test for convergence\"\"\"\n if self.disagreement() < tol:\n return True\n else:\n return False\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time()\n return self.T[-1]\n\n def plot(self, weight_average=False):\n \"\"\"Show the convergence analysis\"\"\"\n if len(self.X) == 0 or len(self.T) == 0:\n print('Nothing 
to plot...')\n x = np.array(self.X)\n for i in range(0, x.shape[1]):\n plt.plot(self.T, x[:, i, 0])\n if weight_average:\n w_i = np.zeros(self.size)\n s = sum(np.array(self.graph.degree)[:, 1])\n x = self.x_init\n for i in nx.nodes(self.graph):\n w_i[i] = self.graph.degree(i) / s\n x[i] = x[i] * w_i[i]\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + sum(x),\n label='Connected graph consensus: ' + str(sum(x)), color=\n 'red', marker='s')\n else:\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + np.mean\n (self.x_init), label='Connected graph consensus: ' + str(\n round(np.mean(self.x_init), 3)), color='red', marker='s')\n plt.grid()\n plt.xlabel('Time (seconds)')\n plt.ylabel('State')\n plt.title('Convergence of consensus algorithm')\n plt.legend()\n\n def print_delay(self):\n print('Delay in seconds')\n return self.dt * self.tau\n\n def delay_stable_max(self):\n d = maximum_degree(self.graph)\n return np.pi / (4 * d[1])\n",
"step-4": "import numpy as np\nimport matplotlib.pyplot as plt\nimport networkx as nx\nimport time\nimport sys\n\n\nclass ConsensusSimulation:\n \"\"\"Class to model a general consensus problem\n see DOI: 10.1109/JPROC.2006.887293\"\"\"\n\n def __init__(self, topology, dynamics, dynamics_args, time_step=0.01,\n x_init=None, convergence_warning=True, delay=0):\n if isinstance(topology, nx.Graph):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print('Argument Error: topology must be type', type(nx.Graph()))\n if callable(dynamics):\n self.f = dynamics\n if len(dynamics_args) == 1:\n self.f_arg = dynamics_args, 1\n self.f_arg = dynamics_args\n else:\n print('Argument Error: dynamics must be a function')\n self.dt = time_step\n self.tau = delay\n if not isinstance(x_init, type(np.ones(1))) and x_init == None:\n self.x = np.linspace(1, self.size, self.size)\n self.x = self.x.reshape(self.size, 1)\n else:\n self.x = x_init.copy().reshape(self.size, 1)\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n self.warn = convergence_warning\n self.d_max = max(np.array(self.graph.degree)[:, 1])\n self.tau_max = np.pi / (4 * self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5 * np.dot(np.dot(np.transpose(self.x), self.L), self.x).item(\n 0)\n\n def agreement(self, tol=1e-06):\n \"\"\"Test for convergence\"\"\"\n if self.disagreement() < tol:\n return True\n else:\n return False\n\n def run_sim(self, record_all=False, update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t = 0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.x + self.dt * self.f(self.x, *self.f_arg)\n if record_all:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n elif t - np.floor(t) < 0.01:\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = time.time() - start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write('\\r' +\n 'Iteration: {}, disagreement: {}, time: {}'.format(\n progress, self.disagreement(), time.time() - start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n print('')\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay=1, runtime=100):\n t = 0\n self.tau = delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0, delay + 1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if self.T[-1] > runtime:\n break\n if t == 0 and self.warn and not nx.is_connected(self.graph):\n print(\n 'Graph not connected, consensus algorithm will probably not converge!'\n )\n print('Simulating to 5 seconds...')\n flag = True\n if flag and time.time() - start > 5:\n break\n self.x = self.X[-1]\n if len(self.X) - delay < 0:\n pass\n else:\n index = len(self.X) - delay\n self.x = self.X[-1] + self.dt * self.f(self.X[index], *self\n .f_arg)\n self.X.append(self.x)\n self.T.append(time.time() - start)\n t = t + self.dt\n end = 
time.time()\n return self.T[-1]\n\n def plot(self, weight_average=False):\n \"\"\"Show the convergence analysis\"\"\"\n if len(self.X) == 0 or len(self.T) == 0:\n print('Nothing to plot...')\n x = np.array(self.X)\n for i in range(0, x.shape[1]):\n plt.plot(self.T, x[:, i, 0])\n if weight_average:\n w_i = np.zeros(self.size)\n s = sum(np.array(self.graph.degree)[:, 1])\n x = self.x_init\n for i in nx.nodes(self.graph):\n w_i[i] = self.graph.degree(i) / s\n x[i] = x[i] * w_i[i]\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + sum(x),\n label='Connected graph consensus: ' + str(sum(x)), color=\n 'red', marker='s')\n else:\n plt.plot(np.linspace(0, self.T[-1], 10), np.zeros(10) + np.mean\n (self.x_init), label='Connected graph consensus: ' + str(\n round(np.mean(self.x_init), 3)), color='red', marker='s')\n plt.grid()\n plt.xlabel('Time (seconds)')\n plt.ylabel('State')\n plt.title('Convergence of consensus algorithm')\n plt.legend()\n\n def print_delay(self):\n print('Delay in seconds')\n return self.dt * self.tau\n\n def delay_stable_max(self):\n d = maximum_degree(self.graph)\n return np.pi / (4 * d[1])\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\nimport networkx as nx\nimport time\nimport sys\n\nclass ConsensusSimulation:\n \"\"\"Class to model a general consensus problem\n see DOI: 10.1109/JPROC.2006.887293\"\"\"\n def __init__(self,\n topology,\n dynamics,\n dynamics_args,\n time_step=0.01,\n x_init=None,\n convergence_warning=True,\n delay=0):\n # check arguments are of the\n # correct form\n if(isinstance(topology,nx.Graph)):\n self.graph = topology\n self.size = len(self.graph)\n else:\n print(\"Argument Error: topology must be type\"\n , type(nx.Graph()))\n if(callable(dynamics)):\n self.f = dynamics\n if(len(dynamics_args)==1):\n self.f_arg = (dynamics_args,1)\n self.f_arg = dynamics_args\n else:\n print(\"Argument Error: dynamics must be a function\")\n self.dt = time_step\n self.tau = delay\n # set up initial vector to\n # 1,2,3,...,n\n if(not isinstance(x_init, type(np.ones(1))) and x_init==None):\n self.x = np.linspace(1,self.size,self.size)\n self.x = self.x.reshape(self.size,1)\n else:\n self.x = x_init.copy().reshape(self.size,1)\n # The Laplacian matrix, quite the building block\n # for the algorithms\n self.L = nx.laplacian_matrix(self.graph).todense()\n self.X = list()\n self.T = list()\n # connected graph won't converge\n # maybe there's some algorithm that will\n # though...\n self.warn = convergence_warning\n\n self.d_max = max(np.array(self.graph.degree)[:,1])\n self.tau_max = (np.pi)/(4*self.d_max)\n\n def disagreement(self):\n \"\"\"Returns the 'error'/inhomogeneity in the\n decision vector\"\"\"\n return 0.5*(np.dot(np.dot(np.transpose(self.x),self.L),self.x)).item(0)\n\n def agreement(self,tol=1e-6):\n \"\"\"Test for convergence\"\"\"\n if(self.disagreement()<tol):\n return True\n else:\n return False\n\n def run_sim(self,record_all=False,update_every=1.0):\n \"\"\"run the core simulation\"\"\"\n t=0\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n time_since_last_update = 0.0\n progress = 1\n while self.agreement() == False:\n start_it = time.time()\n if(t==0 and self.warn and not nx.is_connected(self.graph)):\n print(\"Graph not connected, consensus algorithm will probably not converge!\")\n print(\"Simulating to 5 seconds...\")\n flag = True\n if(flag and time.time()-start>5):\n break\n # core simulation done here\n # very simple discretisation...\n self.x = self.x+self.dt*self.f(self.x,*self.f_arg)\n # odd way to test for 1,2,3,etc\n # when arg is float\n if (record_all):\n self.X.append(self.x)\n self.T.append(time.time()-start)\n else:\n if (t-np.floor(t)<1e-2):\n self.X.append(self.x)\n self.T.append(time.time()-start)\n t = t+self.dt\n end = time.time()-start_it\n time_since_last_update += end\n if time_since_last_update >= update_every:\n sys.stdout.write(\"\\r\" + \"Iteration: {}, disagreement: {}, time: {}\".format(progress,self.disagreement(),time.time()-start))\n sys.stdout.flush()\n time_since_last_update = 0.0\n progress += 1\n\n print(\"\")\n end = time.time()\n return self.T[-1]\n\n def sim_delay(self, delay = 1, runtime=100):\n t=0\n self.tau=delay\n self.x_init = self.x\n self.X = list()\n self.T = list()\n flag = False\n for i in range(0,delay+1):\n self.X.append(self.x)\n self.T.append(0)\n start = time.time()\n while self.agreement() == False:\n if (self.T[-1] > runtime):\n break\n if (t==0 and self.warn and not nx.is_connected(self.graph)):\n print(\"Graph not connected, consensus algorithm will probably not converge!\")\n 
print(\"Simulating to 5 seconds...\")\n flag = True\n if(flag and time.time()-start>5):\n break\n # core simulation done here\n # very simple discretisation...\n self.x = self.X[-1]\n if (len(self.X)-delay<0):\n pass\n else:\n index = len(self.X)-delay\n self.x = self.X[-1]+self.dt*self.f(self.X[index],*self.f_arg)\n # odd way to test for 1,2,3,etc\n # when arg is float\n self.X.append(self.x)\n self.T.append(time.time()-start)\n t = t+self.dt\n end = time.time()\n return self.T[-1]\n\n def plot(self, weight_average=False):\n \"\"\"Show the convergence analysis\"\"\"\n if(len(self.X)==0 or len(self.T)==0):\n print(\"Nothing to plot...\")\n x = np.array(self.X)\n for i in range(0,x.shape[1]):\n plt.plot(self.T,x[:,i,0])\n if(weight_average):\n w_i = np.zeros(self.size)\n s = sum(np.array(self.graph.degree)[:,1])\n x = self.x_init\n for i in nx.nodes(self.graph):\n w_i[i] = self.graph.degree(i)/s\n x[i] = x[i]*w_i[i]\n plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+sum(x), label=\"Connected graph consensus: \"+str(sum(x)),color='red',marker='s')\n else:\n plt.plot(np.linspace(0,self.T[-1],10),np.zeros(10)+np.mean(self.x_init), label=\"Connected graph consensus: \"+str(round(np.mean(self.x_init),3)),color='red',marker='s')\n plt.grid()\n plt.xlabel(\"Time (seconds)\")\n plt.ylabel(\"State\")\n plt.title(\"Convergence of consensus algorithm\")\n plt.legend()\n\n def print_delay(self):\n print(\"Delay in seconds\")\n return self.dt*self.tau\n\n def delay_stable_max(self):\n d = maximum_degree(self.graph)\n return (np.pi)/(4*d[1])\n",
"step-ids": [
3,
8,
9,
11,
12
]
}
|
[
3,
8,
9,
11,
12
] |
DEBUG = True
ADMINS = frozenset(["[email protected]"])
|
normal
|
{
"blob_id": "68bade5767d4f418bcae07485a179df5e47e652c",
"index": 9066,
"step-1": "<mask token>\n",
"step-2": "DEBUG = True\nADMINS = frozenset(['[email protected]'])\n",
"step-3": "DEBUG = True\nADMINS = frozenset([\"[email protected]\"])",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
class AdditiveAttention(nn.Module):
<|reserved_special_token_0|>
def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):
super(AdditiveAttention, self).__init__(**kwargs)
self.W_k = nn.Linear(key_size, num_hiddens, bias=False)
self.W_q = nn.Linear(query_size, num_hiddens, bias=False)
self.W_v = nn.Linear(num_hiddens, 1, bias=False)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens):
queries, keys = self.W_q(queries), self.W_k(keys)
features = queries.unsqueeze(2) + keys.unsqueeze(1)
features = torch.tanh(features)
scores = self.W_v(features).squeeze(-1)
print('scores:', scores)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
class DotProductAttention(nn.Module):
"""缩放点积注意力"""
def __init__(self, dropout, **kwargs):
super(DotProductAttention, self).__init__(**kwargs)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens=None):
d = queries.shape[-1]
scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def masked_softmax(X, valid_lens):
"""通过在最后一个轴上掩蔽元素来执行softmax操作"""
if valid_lens is None:
return nn.functional.softmax(X, dim=-1)
else:
shape = X.shape
if valid_lens.dim() == 1:
valid_lens = torch.repeat_interleave(valid_lens, shape[1])
else:
valid_lens = valid_lens.reshape(-1)
X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-
1000000.0)
return nn.functional.softmax(X.reshape(shape), dim=-1)
<|reserved_special_token_0|>
class AdditiveAttention(nn.Module):
"""加性注意力"""
def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):
super(AdditiveAttention, self).__init__(**kwargs)
self.W_k = nn.Linear(key_size, num_hiddens, bias=False)
self.W_q = nn.Linear(query_size, num_hiddens, bias=False)
self.W_v = nn.Linear(num_hiddens, 1, bias=False)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens):
queries, keys = self.W_q(queries), self.W_k(keys)
features = queries.unsqueeze(2) + keys.unsqueeze(1)
features = torch.tanh(features)
scores = self.W_v(features).squeeze(-1)
print('scores:', scores)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
class DotProductAttention(nn.Module):
"""缩放点积注意力"""
def __init__(self, dropout, **kwargs):
super(DotProductAttention, self).__init__(**kwargs)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens=None):
d = queries.shape[-1]
scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def masked_softmax(X, valid_lens):
"""通过在最后一个轴上掩蔽元素来执行softmax操作"""
if valid_lens is None:
return nn.functional.softmax(X, dim=-1)
else:
shape = X.shape
if valid_lens.dim() == 1:
valid_lens = torch.repeat_interleave(valid_lens, shape[1])
else:
valid_lens = valid_lens.reshape(-1)
X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-
1000000.0)
return nn.functional.softmax(X.reshape(shape), dim=-1)
<|reserved_special_token_0|>
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([2, 3])))
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([[1, 3], [2, 4]])))
class AdditiveAttention(nn.Module):
"""加性注意力"""
def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):
super(AdditiveAttention, self).__init__(**kwargs)
self.W_k = nn.Linear(key_size, num_hiddens, bias=False)
self.W_q = nn.Linear(query_size, num_hiddens, bias=False)
self.W_v = nn.Linear(num_hiddens, 1, bias=False)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens):
queries, keys = self.W_q(queries), self.W_k(keys)
features = queries.unsqueeze(2) + keys.unsqueeze(1)
features = torch.tanh(features)
scores = self.W_v(features).squeeze(-1)
print('scores:', scores)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
attention.eval()
print(attention(queries, keys, values, valid_lens))
class DotProductAttention(nn.Module):
"""缩放点积注意力"""
def __init__(self, dropout, **kwargs):
super(DotProductAttention, self).__init__(**kwargs)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens=None):
d = queries.shape[-1]
scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
attention.eval()
print(attention(queries, keys, values, valid_lens))
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def masked_softmax(X, valid_lens):
"""通过在最后一个轴上掩蔽元素来执行softmax操作"""
if valid_lens is None:
return nn.functional.softmax(X, dim=-1)
else:
shape = X.shape
if valid_lens.dim() == 1:
valid_lens = torch.repeat_interleave(valid_lens, shape[1])
else:
valid_lens = valid_lens.reshape(-1)
X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-
1000000.0)
return nn.functional.softmax(X.reshape(shape), dim=-1)
<|reserved_special_token_0|>
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([2, 3])))
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([[1, 3], [2, 4]])))
class AdditiveAttention(nn.Module):
"""加性注意力"""
def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):
super(AdditiveAttention, self).__init__(**kwargs)
self.W_k = nn.Linear(key_size, num_hiddens, bias=False)
self.W_q = nn.Linear(query_size, num_hiddens, bias=False)
self.W_v = nn.Linear(num_hiddens, 1, bias=False)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens):
queries, keys = self.W_q(queries), self.W_k(keys)
features = queries.unsqueeze(2) + keys.unsqueeze(1)
features = torch.tanh(features)
scores = self.W_v(features).squeeze(-1)
print('scores:', scores)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
queries, keys = torch.normal(0, 1, (2, 1, 20)), torch.ones((2, 10, 2))
values = torch.arange(40, dtype=torch.float32).reshape(1, 10, 4).repeat(2, 1, 1
)
valid_lens = torch.tensor([2, 6])
attention = AdditiveAttention(key_size=2, query_size=20, num_hiddens=8,
dropout=0.1)
attention.eval()
print(attention(queries, keys, values, valid_lens))
class DotProductAttention(nn.Module):
"""缩放点积注意力"""
def __init__(self, dropout, **kwargs):
super(DotProductAttention, self).__init__(**kwargs)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens=None):
d = queries.shape[-1]
scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
<|reserved_special_token_0|>
queries = torch.normal(0, 1, (2, 1, 2))
attention = DotProductAttention(dropout=0.5)
attention.eval()
print(attention(queries, keys, values, valid_lens))
<|reserved_special_token_1|>
import math
import torch
from torch import nn
from d2l import torch as d2l
def masked_softmax(X, valid_lens):
"""通过在最后一个轴上掩蔽元素来执行softmax操作"""
# X:3D张量,valid_lens:1D或2D张量
if valid_lens is None:
return nn.functional.softmax(X, dim=-1)
else:
shape = X.shape
if valid_lens.dim() == 1:
valid_lens = torch.repeat_interleave(valid_lens, shape[1])
else:
valid_lens = valid_lens.reshape(-1)
        # Masked elements on the last axis are replaced with a very large negative value, so their softmax output is 0
X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-1e6)
return nn.functional.softmax(X.reshape(shape), dim=-1)
"""测试softmax掩码"""
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([2, 3])))
print(masked_softmax(torch.rand(2, 2, 4), torch.tensor([[1, 3], [2, 4]])))
class AdditiveAttention(nn.Module):
"""加性注意力"""
def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):
super(AdditiveAttention, self).__init__(**kwargs)
self.W_k = nn.Linear(key_size, num_hiddens, bias=False)
self.W_q = nn.Linear(query_size, num_hiddens, bias=False)
self.W_v = nn.Linear(num_hiddens, 1, bias=False)
self.dropout = nn.Dropout(dropout)
def forward(self, queries, keys, values, valid_lens):
queries, keys = self.W_q(queries), self.W_k(keys)
        # After dimension expansion:
        # queries has shape (batch_size, no. of queries, 1, num_hiddens)
        # keys has shape (batch_size, 1, no. of key-value pairs, num_hiddens)
        # Sum them up with broadcasting
features = queries.unsqueeze(2) + keys.unsqueeze(1)
features = torch.tanh(features)
        # self.W_v has only one output, so remove that last dimension from the shape.
        # scores has shape (batch_size, no. of queries, no. of key-value pairs)
scores = self.W_v(features).squeeze(-1)
print("scores:", scores)
self.attention_weights = masked_softmax(scores, valid_lens)
        # values has shape (batch_size, no. of key-value pairs, value dimension)
return torch.bmm(self.dropout(self.attention_weights), values)
"""加性注意力函数测试"""
queries, keys = torch.normal(0, 1, (2, 1, 20)), torch.ones((2, 10, 2))
# Mini-batch of values; the two value matrices are identical
values = torch.arange(40, dtype=torch.float32).reshape(1, 10, 4).repeat(2, 1, 1)
valid_lens = torch.tensor([2, 6])
attention = AdditiveAttention(key_size=2, query_size=20, num_hiddens=8, dropout=0.1)
attention.eval()
print(attention(queries, keys, values, valid_lens))
class DotProductAttention(nn.Module):
"""缩放点积注意力"""
def __init__(self, dropout, **kwargs):
super(DotProductAttention, self).__init__(**kwargs)
self.dropout = nn.Dropout(dropout)
    # queries has shape (batch_size, no. of queries, d)
    # keys has shape (batch_size, no. of key-value pairs, d)
    # values has shape (batch_size, no. of key-value pairs, value dimension)
    # valid_lens has shape (batch_size,) or (batch_size, no. of queries)
def forward(self, queries, keys, values, valid_lens=None):
d = queries.shape[-1]
        # Set transpose_b=True to swap the last two dimensions of keys
scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)
self.attention_weights = masked_softmax(scores, valid_lens)
return torch.bmm(self.dropout(self.attention_weights), values)
"""缩放点积注意力函数测试"""
queries = torch.normal(0, 1, (2, 1, 2))
attention = DotProductAttention(dropout=0.5)
attention.eval()
print(attention(queries, keys, values, valid_lens))
|
flexible
|
{
"blob_id": "cda01bc7b0ebcfaf010bb87e7d9be34fd310d7a7",
"index": 9626,
"step-1": "<mask token>\n\n\nclass AdditiveAttention(nn.Module):\n <mask token>\n\n def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):\n super(AdditiveAttention, self).__init__(**kwargs)\n self.W_k = nn.Linear(key_size, num_hiddens, bias=False)\n self.W_q = nn.Linear(query_size, num_hiddens, bias=False)\n self.W_v = nn.Linear(num_hiddens, 1, bias=False)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens):\n queries, keys = self.W_q(queries), self.W_k(keys)\n features = queries.unsqueeze(2) + keys.unsqueeze(1)\n features = torch.tanh(features)\n scores = self.W_v(features).squeeze(-1)\n print('scores:', scores)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\n\n\nclass DotProductAttention(nn.Module):\n \"\"\"缩放点积注意力\"\"\"\n\n def __init__(self, dropout, **kwargs):\n super(DotProductAttention, self).__init__(**kwargs)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens=None):\n d = queries.shape[-1]\n scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef masked_softmax(X, valid_lens):\n \"\"\"通过在最后一个轴上掩蔽元素来执行softmax操作\"\"\"\n if valid_lens is None:\n return nn.functional.softmax(X, dim=-1)\n else:\n shape = X.shape\n if valid_lens.dim() == 1:\n valid_lens = torch.repeat_interleave(valid_lens, shape[1])\n else:\n valid_lens = valid_lens.reshape(-1)\n X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-\n 1000000.0)\n return nn.functional.softmax(X.reshape(shape), dim=-1)\n\n\n<mask token>\n\n\nclass AdditiveAttention(nn.Module):\n \"\"\"加性注意力\"\"\"\n\n def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):\n super(AdditiveAttention, self).__init__(**kwargs)\n self.W_k = nn.Linear(key_size, num_hiddens, bias=False)\n self.W_q = nn.Linear(query_size, num_hiddens, bias=False)\n self.W_v = nn.Linear(num_hiddens, 1, bias=False)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens):\n queries, keys = self.W_q(queries), self.W_k(keys)\n features = queries.unsqueeze(2) + keys.unsqueeze(1)\n features = torch.tanh(features)\n scores = self.W_v(features).squeeze(-1)\n print('scores:', scores)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\n\n\nclass DotProductAttention(nn.Module):\n \"\"\"缩放点积注意力\"\"\"\n\n def __init__(self, dropout, **kwargs):\n super(DotProductAttention, self).__init__(**kwargs)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens=None):\n d = queries.shape[-1]\n scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef masked_softmax(X, valid_lens):\n \"\"\"通过在最后一个轴上掩蔽元素来执行softmax操作\"\"\"\n if valid_lens is None:\n return nn.functional.softmax(X, dim=-1)\n else:\n shape = X.shape\n if valid_lens.dim() == 1:\n valid_lens = torch.repeat_interleave(valid_lens, shape[1])\n else:\n valid_lens = valid_lens.reshape(-1)\n X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-\n 1000000.0)\n return nn.functional.softmax(X.reshape(shape), dim=-1)\n\n\n<mask token>\nprint(masked_softmax(torch.rand(2, 2, 4), torch.tensor([2, 3])))\nprint(masked_softmax(torch.rand(2, 2, 4), torch.tensor([[1, 3], [2, 4]])))\n\n\nclass AdditiveAttention(nn.Module):\n \"\"\"加性注意力\"\"\"\n\n def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):\n super(AdditiveAttention, self).__init__(**kwargs)\n self.W_k = nn.Linear(key_size, num_hiddens, bias=False)\n self.W_q = nn.Linear(query_size, num_hiddens, bias=False)\n self.W_v = nn.Linear(num_hiddens, 1, bias=False)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens):\n queries, keys = self.W_q(queries), self.W_k(keys)\n features = queries.unsqueeze(2) + keys.unsqueeze(1)\n features = torch.tanh(features)\n scores = self.W_v(features).squeeze(-1)\n print('scores:', scores)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\nattention.eval()\nprint(attention(queries, keys, values, valid_lens))\n\n\nclass DotProductAttention(nn.Module):\n \"\"\"缩放点积注意力\"\"\"\n\n def __init__(self, dropout, **kwargs):\n super(DotProductAttention, self).__init__(**kwargs)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens=None):\n d = queries.shape[-1]\n scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\nattention.eval()\nprint(attention(queries, keys, values, valid_lens))\n",
"step-4": "<mask token>\n\n\ndef masked_softmax(X, valid_lens):\n \"\"\"通过在最后一个轴上掩蔽元素来执行softmax操作\"\"\"\n if valid_lens is None:\n return nn.functional.softmax(X, dim=-1)\n else:\n shape = X.shape\n if valid_lens.dim() == 1:\n valid_lens = torch.repeat_interleave(valid_lens, shape[1])\n else:\n valid_lens = valid_lens.reshape(-1)\n X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-\n 1000000.0)\n return nn.functional.softmax(X.reshape(shape), dim=-1)\n\n\n<mask token>\nprint(masked_softmax(torch.rand(2, 2, 4), torch.tensor([2, 3])))\nprint(masked_softmax(torch.rand(2, 2, 4), torch.tensor([[1, 3], [2, 4]])))\n\n\nclass AdditiveAttention(nn.Module):\n \"\"\"加性注意力\"\"\"\n\n def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):\n super(AdditiveAttention, self).__init__(**kwargs)\n self.W_k = nn.Linear(key_size, num_hiddens, bias=False)\n self.W_q = nn.Linear(query_size, num_hiddens, bias=False)\n self.W_v = nn.Linear(num_hiddens, 1, bias=False)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens):\n queries, keys = self.W_q(queries), self.W_k(keys)\n features = queries.unsqueeze(2) + keys.unsqueeze(1)\n features = torch.tanh(features)\n scores = self.W_v(features).squeeze(-1)\n print('scores:', scores)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\nqueries, keys = torch.normal(0, 1, (2, 1, 20)), torch.ones((2, 10, 2))\nvalues = torch.arange(40, dtype=torch.float32).reshape(1, 10, 4).repeat(2, 1, 1\n )\nvalid_lens = torch.tensor([2, 6])\nattention = AdditiveAttention(key_size=2, query_size=20, num_hiddens=8,\n dropout=0.1)\nattention.eval()\nprint(attention(queries, keys, values, valid_lens))\n\n\nclass DotProductAttention(nn.Module):\n \"\"\"缩放点积注意力\"\"\"\n\n def __init__(self, dropout, **kwargs):\n super(DotProductAttention, self).__init__(**kwargs)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens=None):\n d = queries.shape[-1]\n scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n<mask token>\nqueries = torch.normal(0, 1, (2, 1, 2))\nattention = DotProductAttention(dropout=0.5)\nattention.eval()\nprint(attention(queries, keys, values, valid_lens))\n",
"step-5": "import math\nimport torch\nfrom torch import nn\nfrom d2l import torch as d2l\n\n\ndef masked_softmax(X, valid_lens):\n \"\"\"通过在最后一个轴上掩蔽元素来执行softmax操作\"\"\"\n # X:3D张量,valid_lens:1D或2D张量\n if valid_lens is None:\n return nn.functional.softmax(X, dim=-1)\n else:\n shape = X.shape\n if valid_lens.dim() == 1:\n valid_lens = torch.repeat_interleave(valid_lens, shape[1])\n else:\n valid_lens = valid_lens.reshape(-1)\n # 最后一轴上被掩蔽的元素使用一个非常大的负值替换,从而其softmax输出为0\n X = d2l.sequence_mask(X.reshape(-1, shape[-1]), valid_lens, value=-1e6)\n return nn.functional.softmax(X.reshape(shape), dim=-1)\n\n\n\"\"\"测试softmax掩码\"\"\"\nprint(masked_softmax(torch.rand(2, 2, 4), torch.tensor([2, 3])))\nprint(masked_softmax(torch.rand(2, 2, 4), torch.tensor([[1, 3], [2, 4]])))\n\n\nclass AdditiveAttention(nn.Module):\n \"\"\"加性注意力\"\"\"\n def __init__(self, key_size, query_size, num_hiddens, dropout, **kwargs):\n super(AdditiveAttention, self).__init__(**kwargs)\n self.W_k = nn.Linear(key_size, num_hiddens, bias=False)\n self.W_q = nn.Linear(query_size, num_hiddens, bias=False)\n self.W_v = nn.Linear(num_hiddens, 1, bias=False)\n self.dropout = nn.Dropout(dropout)\n\n def forward(self, queries, keys, values, valid_lens):\n queries, keys = self.W_q(queries), self.W_k(keys)\n # 在维度扩展后,\n # queries的形状:(batch_size,查询的个数,1,num_hidden)\n # keys的形状:(batch_size,1,“键-值”对的个数,num_hiddens)\n # 使用广播方式进行求和\n features = queries.unsqueeze(2) + keys.unsqueeze(1)\n features = torch.tanh(features)\n # self.w_v仅有一个输出,因此从形状中移除最后那个维度。\n # scores的形状:(batch_size,查询的个数,“键-值”对的个数)\n scores = self.W_v(features).squeeze(-1)\n print(\"scores:\", scores)\n self.attention_weights = masked_softmax(scores, valid_lens)\n # values的形状:(batch_size,“键-值”对的个数,值的维度)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n\"\"\"加性注意力函数测试\"\"\"\nqueries, keys = torch.normal(0, 1, (2, 1, 20)), torch.ones((2, 10, 2))\n# values的小批量,两个值矩阵是相同的\nvalues = torch.arange(40, dtype=torch.float32).reshape(1, 10, 4).repeat(2, 1, 1)\nvalid_lens = torch.tensor([2, 6])\n\nattention = AdditiveAttention(key_size=2, query_size=20, num_hiddens=8, dropout=0.1)\nattention.eval()\nprint(attention(queries, keys, values, valid_lens))\n\n\nclass DotProductAttention(nn.Module):\n \"\"\"缩放点积注意力\"\"\"\n def __init__(self, dropout, **kwargs):\n super(DotProductAttention, self).__init__(**kwargs)\n self.dropout = nn.Dropout(dropout)\n\n # queries的形状:(batch_size,查询的个数,d)\n # keys的形状:(batch_size,“键-值”对的个数,d)\n # values的形状:(batch_size,“键-值”对的个数,值的维度)\n # valid_lens的形状:(batch_size,)或者(batch_size,查询的个数)\n def forward(self, queries, keys, values, valid_lens=None):\n d = queries.shape[-1]\n # 设置transpose_b=True为了交换的keys的最后两个维度\n scores = torch.bmm(queries, keys.transpose(1, 2)) / math.sqrt(d)\n self.attention_weights = masked_softmax(scores, valid_lens)\n return torch.bmm(self.dropout(self.attention_weights), values)\n\n\n\"\"\"缩放点积注意力函数测试\"\"\"\nqueries = torch.normal(0, 1, (2, 1, 2))\nattention = DotProductAttention(dropout=0.5)\nattention.eval()\nprint(attention(queries, keys, values, valid_lens))\n\n\n\n\n\n\n\n\n\n\n\n",
"step-ids": [
7,
9,
10,
11,
13
]
}
|
[
7,
9,
10,
11,
13
] |
<|reserved_special_token_0|>
class Test_DatabaseUtils(unittest.TestCase):
def setUp(self):
self.db = DatabaseUtils()
<|reserved_special_token_0|>
def test_getUser(self):
count = self.dataCount()
try:
trueResult = self.db.getUser('username')
print('Test passed')
except:
print('Test failed')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_updateBookTransaction(self):
testData = 1, 'anything', '2019-01-01'
result = self.db.updateBookTransaction(testData[0], testData[1],
testData[2])
self.assertFalse(result)
<|reserved_special_token_0|>
def test_searchBooksAuthur(self):
result = self.db.searchBooksAuthur('abc')
self.assertFalse(result)
result = self.db.searchBooksAuthur('gavin')
self.assertTrue(result)
def test_searchBooksISBN(self):
result = self.db.searchBooksISBN(1)
self.assertFalse(result)
<|reserved_special_token_0|>
def test_getBook(self):
result = self.db.getBook(1)
self.assertTrue(result)
def test_getBookISBN(self):
result = self.db.getBookISBN(1)
self.assertFalse(result)
<|reserved_special_token_0|>
def test_getReturnBook(self):
result = self.db.getReturnBook(1, 1)
self.assertTrue(result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Test_DatabaseUtils(unittest.TestCase):
def setUp(self):
self.db = DatabaseUtils()
<|reserved_special_token_0|>
def test_getUser(self):
count = self.dataCount()
try:
trueResult = self.db.getUser('username')
print('Test passed')
except:
print('Test failed')
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_updateBookTransaction(self):
testData = 1, 'anything', '2019-01-01'
result = self.db.updateBookTransaction(testData[0], testData[1],
testData[2])
self.assertFalse(result)
<|reserved_special_token_0|>
def test_searchBooksAuthur(self):
result = self.db.searchBooksAuthur('abc')
self.assertFalse(result)
result = self.db.searchBooksAuthur('gavin')
self.assertTrue(result)
def test_searchBooksISBN(self):
result = self.db.searchBooksISBN(1)
self.assertFalse(result)
def test_listBooks(self):
result = self.db.listBooks()
self.assertTrue(result)
def test_getBook(self):
result = self.db.getBook(1)
self.assertTrue(result)
def test_getBookISBN(self):
result = self.db.getBookISBN(1)
self.assertFalse(result)
<|reserved_special_token_0|>
def test_getReturnBook(self):
result = self.db.getReturnBook(1, 1)
self.assertTrue(result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Test_DatabaseUtils(unittest.TestCase):
def setUp(self):
self.db = DatabaseUtils()
<|reserved_special_token_0|>
def test_getUser(self):
count = self.dataCount()
try:
trueResult = self.db.getUser('username')
print('Test passed')
except:
print('Test failed')
def test_insertBookTransaction(self):
testData = 1, 1, '2019-01-01', 'abc'
result = self.db.insertBookTransaction(testData[0], testData[1],
testData[2], testData[3])
print('result: ', result)
self.assertTrue(result)
def test_updateBookStatus(self):
testData = 1, 'anything'
result = self.db.updateBookStatus(testData[1], testData[0])
self.assertFalse(result)
def test_updateBookTransaction(self):
testData = 1, 'anything', '2019-01-01'
result = self.db.updateBookTransaction(testData[0], testData[1],
testData[2])
self.assertFalse(result)
def test_searchBooks(self):
result = self.db.searchBooks('abc')
self.assertFalse(result)
result = self.db.searchBooks('Harry')
self.assertTrue(result)
def test_searchBooksAuthur(self):
result = self.db.searchBooksAuthur('abc')
self.assertFalse(result)
result = self.db.searchBooksAuthur('gavin')
self.assertTrue(result)
def test_searchBooksISBN(self):
result = self.db.searchBooksISBN(1)
self.assertFalse(result)
def test_listBooks(self):
result = self.db.listBooks()
self.assertTrue(result)
def test_getBook(self):
result = self.db.getBook(1)
self.assertTrue(result)
def test_getBookISBN(self):
result = self.db.getBookISBN(1)
self.assertFalse(result)
<|reserved_special_token_0|>
def test_getReturnBook(self):
result = self.db.getReturnBook(1, 1)
self.assertTrue(result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
import unittest
import sys
sys.path.append('..')
from database_utils import DatabaseUtils
class Test_DatabaseUtils(unittest.TestCase):
def setUp(self):
self.db = DatabaseUtils()
def dataCount(self):
with self.db.connection.cursor() as cursor:
cursor.execute('select count(*) from LmsUser')
return cursor.fetchone()[0]
def test_getUser(self):
count = self.dataCount()
try:
trueResult = self.db.getUser('username')
print('Test passed')
except:
print('Test failed')
def test_insertBookTransaction(self):
testData = 1, 1, '2019-01-01', 'abc'
result = self.db.insertBookTransaction(testData[0], testData[1],
testData[2], testData[3])
print('result: ', result)
self.assertTrue(result)
def test_updateBookStatus(self):
testData = 1, 'anything'
result = self.db.updateBookStatus(testData[1], testData[0])
self.assertFalse(result)
def test_updateBookTransaction(self):
testData = 1, 'anything', '2019-01-01'
result = self.db.updateBookTransaction(testData[0], testData[1],
testData[2])
self.assertFalse(result)
def test_searchBooks(self):
result = self.db.searchBooks('abc')
self.assertFalse(result)
result = self.db.searchBooks('Harry')
self.assertTrue(result)
def test_searchBooksAuthur(self):
result = self.db.searchBooksAuthur('abc')
self.assertFalse(result)
result = self.db.searchBooksAuthur('gavin')
self.assertTrue(result)
def test_searchBooksISBN(self):
result = self.db.searchBooksISBN(1)
self.assertFalse(result)
def test_listBooks(self):
result = self.db.listBooks()
self.assertTrue(result)
def test_getBook(self):
result = self.db.getBook(1)
self.assertTrue(result)
def test_getBookISBN(self):
result = self.db.getBookISBN(1)
self.assertFalse(result)
def test_listReturnBooks(self):
result = self.db.listReturnBooks(1)
self.assertTrue(result)
def test_getReturnBook(self):
result = self.db.getReturnBook(1, 1)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
<|reserved_special_token_1|>
# Reference: https://docs.python.org/2/library/unittest.html
import unittest
import sys
sys.path.append('..')
from database_utils import DatabaseUtils
class Test_DatabaseUtils(unittest.TestCase):
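    """Unit tests for DatabaseUtils; these assume a reachable database seeded with LMS sample data."""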
def setUp(self):
self.db=DatabaseUtils()
def dataCount(self):
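        # helper: number of rows currently in the LmsUser table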
with self.db.connection.cursor() as cursor:
cursor.execute("select count(*) from LmsUser")
return cursor.fetchone()[0]
def test_getUser(self):
count = self.dataCount()
try:
trueResult=self.db.getUser("username")
print("Test passed")
except:
print("Test failed")
def test_insertBookTransaction(self):
testData=(1,1,"2019-01-01","abc")
result=self.db.insertBookTransaction(testData[0],testData[1],testData[2],testData[3])
print("result: ",result)
self.assertTrue(result)
def test_updateBookStatus(self):
testData=(1,"anything")
result=self.db.updateBookStatus(testData[1],testData[0])
self.assertFalse(result)
def test_updateBookTransaction(self):
testData=(1,"anything","2019-01-01")
result=self.db.updateBookTransaction(testData[0],testData[1],testData[2])
self.assertFalse(result)
def test_searchBooks(self):
result=self.db.searchBooks("abc")
self.assertFalse(result)
result=self.db.searchBooks("Harry")
self.assertTrue(result)
def test_searchBooksAuthur(self):
result=self.db.searchBooksAuthur("abc")
self.assertFalse(result)
result=self.db.searchBooksAuthur("gavin")
self.assertTrue(result)
def test_searchBooksISBN(self):
result=self.db.searchBooksISBN(1)
self.assertFalse(result)
def test_listBooks(self):
result=self.db.listBooks()
self.assertTrue(result)
def test_getBook(self):
result=self.db.getBook(1)
self.assertTrue(result)
def test_getBookISBN(self):
result=self.db.getBookISBN(1)
self.assertFalse(result)
def test_listReturnBooks(self):
result=self.db.listReturnBooks(1)
self.assertTrue(result)
def test_getReturnBook(self):
result=self.db.getReturnBook(1,1)
self.assertTrue(result)
if __name__ == "__main__":
unittest.main()
|
flexible
|
{
"blob_id": "ff8e8af72a8eb97a392fcfec5960eed7a2e51f68",
"index": 9211,
"step-1": "<mask token>\n\n\nclass Test_DatabaseUtils(unittest.TestCase):\n\n def setUp(self):\n self.db = DatabaseUtils()\n <mask token>\n\n def test_getUser(self):\n count = self.dataCount()\n try:\n trueResult = self.db.getUser('username')\n print('Test passed')\n except:\n print('Test failed')\n <mask token>\n <mask token>\n\n def test_updateBookTransaction(self):\n testData = 1, 'anything', '2019-01-01'\n result = self.db.updateBookTransaction(testData[0], testData[1],\n testData[2])\n self.assertFalse(result)\n <mask token>\n\n def test_searchBooksAuthur(self):\n result = self.db.searchBooksAuthur('abc')\n self.assertFalse(result)\n result = self.db.searchBooksAuthur('gavin')\n self.assertTrue(result)\n\n def test_searchBooksISBN(self):\n result = self.db.searchBooksISBN(1)\n self.assertFalse(result)\n <mask token>\n\n def test_getBook(self):\n result = self.db.getBook(1)\n self.assertTrue(result)\n\n def test_getBookISBN(self):\n result = self.db.getBookISBN(1)\n self.assertFalse(result)\n <mask token>\n\n def test_getReturnBook(self):\n result = self.db.getReturnBook(1, 1)\n self.assertTrue(result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass Test_DatabaseUtils(unittest.TestCase):\n\n def setUp(self):\n self.db = DatabaseUtils()\n <mask token>\n\n def test_getUser(self):\n count = self.dataCount()\n try:\n trueResult = self.db.getUser('username')\n print('Test passed')\n except:\n print('Test failed')\n <mask token>\n <mask token>\n\n def test_updateBookTransaction(self):\n testData = 1, 'anything', '2019-01-01'\n result = self.db.updateBookTransaction(testData[0], testData[1],\n testData[2])\n self.assertFalse(result)\n <mask token>\n\n def test_searchBooksAuthur(self):\n result = self.db.searchBooksAuthur('abc')\n self.assertFalse(result)\n result = self.db.searchBooksAuthur('gavin')\n self.assertTrue(result)\n\n def test_searchBooksISBN(self):\n result = self.db.searchBooksISBN(1)\n self.assertFalse(result)\n\n def test_listBooks(self):\n result = self.db.listBooks()\n self.assertTrue(result)\n\n def test_getBook(self):\n result = self.db.getBook(1)\n self.assertTrue(result)\n\n def test_getBookISBN(self):\n result = self.db.getBookISBN(1)\n self.assertFalse(result)\n <mask token>\n\n def test_getReturnBook(self):\n result = self.db.getReturnBook(1, 1)\n self.assertTrue(result)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Test_DatabaseUtils(unittest.TestCase):\n\n def setUp(self):\n self.db = DatabaseUtils()\n <mask token>\n\n def test_getUser(self):\n count = self.dataCount()\n try:\n trueResult = self.db.getUser('username')\n print('Test passed')\n except:\n print('Test failed')\n\n def test_insertBookTransaction(self):\n testData = 1, 1, '2019-01-01', 'abc'\n result = self.db.insertBookTransaction(testData[0], testData[1],\n testData[2], testData[3])\n print('result: ', result)\n self.assertTrue(result)\n\n def test_updateBookStatus(self):\n testData = 1, 'anything'\n result = self.db.updateBookStatus(testData[1], testData[0])\n self.assertFalse(result)\n\n def test_updateBookTransaction(self):\n testData = 1, 'anything', '2019-01-01'\n result = self.db.updateBookTransaction(testData[0], testData[1],\n testData[2])\n self.assertFalse(result)\n\n def test_searchBooks(self):\n result = self.db.searchBooks('abc')\n self.assertFalse(result)\n result = self.db.searchBooks('Harry')\n self.assertTrue(result)\n\n def test_searchBooksAuthur(self):\n result = self.db.searchBooksAuthur('abc')\n self.assertFalse(result)\n result = self.db.searchBooksAuthur('gavin')\n self.assertTrue(result)\n\n def test_searchBooksISBN(self):\n result = self.db.searchBooksISBN(1)\n self.assertFalse(result)\n\n def test_listBooks(self):\n result = self.db.listBooks()\n self.assertTrue(result)\n\n def test_getBook(self):\n result = self.db.getBook(1)\n self.assertTrue(result)\n\n def test_getBookISBN(self):\n result = self.db.getBookISBN(1)\n self.assertFalse(result)\n <mask token>\n\n def test_getReturnBook(self):\n result = self.db.getReturnBook(1, 1)\n self.assertTrue(result)\n\n\n<mask token>\n",
"step-4": "import unittest\nimport sys\nsys.path.append('..')\nfrom database_utils import DatabaseUtils\n\n\nclass Test_DatabaseUtils(unittest.TestCase):\n\n def setUp(self):\n self.db = DatabaseUtils()\n\n def dataCount(self):\n with self.db.connection.cursor() as cursor:\n cursor.execute('select count(*) from LmsUser')\n return cursor.fetchone()[0]\n\n def test_getUser(self):\n count = self.dataCount()\n try:\n trueResult = self.db.getUser('username')\n print('Test passed')\n except:\n print('Test failed')\n\n def test_insertBookTransaction(self):\n testData = 1, 1, '2019-01-01', 'abc'\n result = self.db.insertBookTransaction(testData[0], testData[1],\n testData[2], testData[3])\n print('result: ', result)\n self.assertTrue(result)\n\n def test_updateBookStatus(self):\n testData = 1, 'anything'\n result = self.db.updateBookStatus(testData[1], testData[0])\n self.assertFalse(result)\n\n def test_updateBookTransaction(self):\n testData = 1, 'anything', '2019-01-01'\n result = self.db.updateBookTransaction(testData[0], testData[1],\n testData[2])\n self.assertFalse(result)\n\n def test_searchBooks(self):\n result = self.db.searchBooks('abc')\n self.assertFalse(result)\n result = self.db.searchBooks('Harry')\n self.assertTrue(result)\n\n def test_searchBooksAuthur(self):\n result = self.db.searchBooksAuthur('abc')\n self.assertFalse(result)\n result = self.db.searchBooksAuthur('gavin')\n self.assertTrue(result)\n\n def test_searchBooksISBN(self):\n result = self.db.searchBooksISBN(1)\n self.assertFalse(result)\n\n def test_listBooks(self):\n result = self.db.listBooks()\n self.assertTrue(result)\n\n def test_getBook(self):\n result = self.db.getBook(1)\n self.assertTrue(result)\n\n def test_getBookISBN(self):\n result = self.db.getBookISBN(1)\n self.assertFalse(result)\n\n def test_listReturnBooks(self):\n result = self.db.listReturnBooks(1)\n self.assertTrue(result)\n\n def test_getReturnBook(self):\n result = self.db.getReturnBook(1, 1)\n self.assertTrue(result)\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "# Reference: https://docs.python.org/2/library/unittest.html\nimport unittest\nimport sys\nsys.path.append('..')\nfrom database_utils import DatabaseUtils\n\nclass Test_DatabaseUtils(unittest.TestCase):\n def setUp(self):\n self.db=DatabaseUtils()\n \n def dataCount(self):\n with self.db.connection.cursor() as cursor:\n cursor.execute(\"select count(*) from LmsUser\")\n return cursor.fetchone()[0]\n\n def test_getUser(self):\n count = self.dataCount()\n try:\n trueResult=self.db.getUser(\"username\")\n print(\"Test passed\")\n except:\n print(\"Test failed\")\n\n def test_insertBookTransaction(self):\n testData=(1,1,\"2019-01-01\",\"abc\")\n result=self.db.insertBookTransaction(testData[0],testData[1],testData[2],testData[3])\n print(\"result: \",result)\n self.assertTrue(result)\n\n def test_updateBookStatus(self):\n testData=(1,\"anything\")\n result=self.db.updateBookStatus(testData[1],testData[0])\n self.assertFalse(result)\n\n def test_updateBookTransaction(self):\n testData=(1,\"anything\",\"2019-01-01\")\n result=self.db.updateBookTransaction(testData[0],testData[1],testData[2])\n self.assertFalse(result)\n \n def test_searchBooks(self):\n result=self.db.searchBooks(\"abc\")\n self.assertFalse(result)\n result=self.db.searchBooks(\"Harry\")\n self.assertTrue(result)\n \n def test_searchBooksAuthur(self):\n result=self.db.searchBooksAuthur(\"abc\")\n self.assertFalse(result)\n result=self.db.searchBooksAuthur(\"gavin\")\n self.assertTrue(result)\n \n def test_searchBooksISBN(self):\n result=self.db.searchBooksISBN(1)\n self.assertFalse(result)\n\n def test_listBooks(self):\n result=self.db.listBooks()\n self.assertTrue(result)\n\n def test_getBook(self):\n result=self.db.getBook(1)\n self.assertTrue(result)\n\n def test_getBookISBN(self):\n result=self.db.getBookISBN(1)\n self.assertFalse(result)\n\n def test_listReturnBooks(self):\n result=self.db.listReturnBooks(1)\n self.assertTrue(result)\n\n def test_getReturnBook(self):\n result=self.db.getReturnBook(1,1)\n self.assertTrue(result)\n\nif __name__ == \"__main__\":\n unittest.main()",
"step-ids": [
9,
10,
13,
17,
18
]
}
|
[
9,
10,
13,
17,
18
] |
<|reserved_special_token_0|>
class BlockDeviceTestCase(test.NoDBTestCase):
<|reserved_special_token_0|>
def test_properties(self):
root_device0 = '/dev/sda'
root_device1 = '/dev/sdb'
mappings = [{'virtual': 'root', 'device': root_device0}]
properties0 = {'mappings': mappings}
properties1 = {'mappings': mappings, 'root_device_name': root_device1}
self.assertIsNone(block_device.properties_root_device_name({}))
self.assertEqual(root_device0, block_device.
properties_root_device_name(properties0))
self.assertEqual(root_device1, block_device.
properties_root_device_name(properties1))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_validate_device_name(self):
for value in [' ', 10, None, 'a' * 260]:
self.assertRaises(exception.InvalidBDMFormat, block_device.
validate_device_name, value)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TestBlockDeviceDict(test.NoDBTestCase):
def setUp(self):
super(TestBlockDeviceDict, self).setUp()
BDM = block_device.BlockDeviceDict
self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}, {'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}, {'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume', 'uuid':
'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'source_type':
'snapshot', 'destination_type': 'volume', 'uuid':
'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,
'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}", 'boot_index': 0}), BDM({'id': 4,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',
'source_type': 'snapshot', 'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',
'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.
instance, 'no_device': True, 'device_name': '/dev/vdc'})]
self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'delete_on_termination': True,
'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sdc1', 'delete_on_termination':
True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda1', 'volume_id':
'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}"}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'connection_info':
"{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id':
5, 'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'source_type':
'image', 'destination_type': 'volume', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3',
'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sda4', 'source_type': 'image',
'destination_type': 'local', 'connection_info':
"{'fake': 'connection_info'}", 'image_id': 'fake-image-id-2',
'boot_index': -1})]
self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3'}]
def test_init(self):
def fake_validate(obj, dct):
pass
self.stub_out('nova.block_device.BlockDeviceDict._fields', set([
'field1', 'field2']))
self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',
set(['db_field1', 'db_field2']))
self.stub_out('nova.block_device.BlockDeviceDict._validate',
fake_validate)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':
'bar', 'db_field1': 'baz'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},
do_not_default=set(['field2']))
self.assertIn('field1', dev_dict)
self.assertNotIn('field2', dev_dict)
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict(field1='foo')
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'
)
self.assertEqual('foo', dev_dict['field1'])
self.assertEqual('bar', dev_dict['field2'])
def test_init_prepend_dev_to_device_name(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vda', bdm_dict['device_name'])
bdm['device_name'] = '/dev/vdb'
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vdb', bdm_dict['device_name'])
bdm['device_name'] = None
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertIsNone(bdm_dict['device_name'])
def test_init_boolify_delete_on_termination(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertFalse(bdm_dict['delete_on_termination'])
def test_validate(self):
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, {'bogus_field': 'lame_val'})
lame_bdm = dict(self.new_mapping[2])
del lame_bdm['source_type']
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_bdm)
lame_bdm['no_device'] = True
block_device.BlockDeviceDict(lame_bdm)
lame_dev_bdm = dict(self.new_mapping[2])
lame_dev_bdm['device_name'] = 'not a valid name'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
lame_dev_bdm['device_name'] = ''
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
cool_volume_size_bdm = dict(self.new_mapping[2])
cool_volume_size_bdm['volume_size'] = '42'
cool_volume_size_bdm = block_device.BlockDeviceDict(
cool_volume_size_bdm)
self.assertEqual(42, cool_volume_size_bdm['volume_size'])
lame_volume_size_bdm = dict(self.new_mapping[2])
lame_volume_size_bdm['volume_size'] = 'some_non_int_string'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_volume_size_bdm)
truthy_bdm = dict(self.new_mapping[2])
truthy_bdm['delete_on_termination'] = '1'
truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)
self.assertTrue(truthy_bdm['delete_on_termination'])
verbose_bdm = dict(self.new_mapping[2])
verbose_bdm['boot_index'] = 'first'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, verbose_bdm)
def test_from_legacy(self):
for legacy, new in zip(self.legacy_mapping, self.new_mapping):
self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy
), matchers.IsSubDictOf(new))
def test_from_legacy_mapping(self):
def _get_image_bdms(bdms):
return [bdm for bdm in bdms if bdm['source_type'] == 'image']
def _get_bootable_bdms(bdms):
return [bdm for bdm in bdms if bdm['boot_index'] is not None and
bdm['boot_index'] >= 0]
new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)
self.assertEqual(0, len(_get_image_bdms(new_no_img)))
for new, expected in zip(new_no_img, self.new_mapping):
self.assertThat(new, matchers.IsSubDictOf(expected))
new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref')
image_bdms = _get_image_bdms(new_with_img)
boot_bdms = _get_bootable_bdms(new_with_img)
self.assertEqual(1, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, image_bdms[0]['boot_index'])
self.assertEqual('image', boot_bdms[0]['source_type'])
new_with_img_and_root = block_device.from_legacy_mapping(self.
legacy_mapping, 'fake_image_ref', 'sda1')
image_bdms = _get_image_bdms(new_with_img_and_root)
boot_bdms = _get_bootable_bdms(new_with_img_and_root)
self.assertEqual(0, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, boot_bdms[0]['boot_index'])
self.assertEqual('volume', boot_bdms[0]['source_type'])
new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref', 'sda1', no_root=True)
self.assertEqual(0, len(_get_image_bdms(new_no_root)))
self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))
def test_from_api(self):
for api, new in zip(self.api_mapping, self.new_mapping):
new['connection_info'] = None
if new['snapshot_id']:
new['volume_id'] = None
self.assertThat(block_device.BlockDeviceDict.from_api(api,
False), matchers.IsSubDictOf(new))
def test_from_api_invalid_blank_id(self):
api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination':
True, 'boot_index': -1}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_invalid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1'}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_valid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': 0}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_valid_source_to_local_mapping_with_string_bi(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': '0'}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_invalid_image_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type':
'fake-lvm-1', 'boot_index': 1}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn('Mapping image to local is not supported', str(ex))
def test_from_api_invalid_volume_type_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying a volume_type with destination_type=local is not supported'
, str(ex))
def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(
self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying volume type to existing volume is not supported',
str(ex))
def test_image_mapping(self):
removed_fields = ['id', 'instance_uuid', 'connection_info',
'created_at', 'updated_at', 'deleted_at', 'deleted']
for bdm in self.new_mapping:
mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm
).get_image_mapping()
for fld in removed_fields:
self.assertNotIn(fld, mapping_bdm)
def _test_snapshot_from_bdm(self, template):
snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)
self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])
self.assertEqual('snapshot', snapshot['source_type'])
self.assertEqual('volume', snapshot['destination_type'])
self.assertEqual(template.volume_size, snapshot['volume_size'])
self.assertEqual(template.delete_on_termination, snapshot[
'delete_on_termination'])
self.assertEqual(template.device_name, snapshot['device_name'])
for key in ['disk_bus', 'device_type', 'boot_index']:
self.assertEqual(template[key], snapshot[key])
def test_snapshot_from_bdm(self):
for bdm in self.new_mapping:
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))
def test_snapshot_from_object(self):
for bdm in self.new_mapping[:-1]:
obj = objects.BlockDeviceMapping()
obj = objects.BlockDeviceMapping._from_db_object(None, obj,
fake_block_device.FakeDbBlockDeviceDict(bdm))
self._test_snapshot_from_bdm(obj)
class GetBDMImageMetadataTestCase(test.NoDBTestCase):
def setUp(self):
super().setUp()
self.compute_api = compute_api.API()
self.context = context.RequestContext('fake', 'fake')
def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},
'size': 0, 'status': 'active'}
def get_vol_data(*args, **kwargs):
return {'bootable': is_bootable}
with mock.patch.object(self.compute_api.volume_api, 'get',
side_effect=get_vol_data):
if not is_bootable:
self.assertRaises(exception.InvalidBDMVolumeNotBootable,
block_device.get_bdm_image_metadata, self.context, self
.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
else:
meta = block_device.get_bdm_image_metadata(self.context,
self.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(expected_meta, meta)
def test_get_bdm_image_metadata__non_bootable(self):
self._test_get_bdm_image_metadata__bootable(False)
def test_get_bdm_image_metadata__bootable(self):
self._test_get_bdm_image_metadata__bootable(True)
def test_get_bdm_image_metadata__basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
with mock.patch.object(self.compute_api.volume_api, 'get',
return_value=fake_volume):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
def test_get_bdm_image_metadata__snapshot_basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':
None, 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
fake_snapshot = {'volume_id': '1'}
with test.nested(mock.patch.object(self.compute_api.volume_api,
'get', return_value=fake_volume), mock.patch.object(self.
compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)
) as (volume_get, volume_get_snapshot):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
volume_get_snapshot.assert_called_once_with(self.context,
block_device_mapping[0]['snapshot_id'])
volume_get.assert_called_once_with(self.context, fake_snapshot[
'volume_id'])
@mock.patch.object(cinder.API, 'get', side_effect=exception.
CinderConnectionFailed(reason='error'))
def test_get_bdm_image_metadata__cinder_down(self, mock_get):
bdms = [objects.BlockDeviceMapping(**fake_block_device.
FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':
'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]
self.assertRaises(exception.CinderConnectionFailed, block_device.
get_bdm_image_metadata, self.context, self.compute_api.
image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)
class GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):
def test_inherit_image_properties(self):
properties = {'fake_prop': 'fake_value'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(properties, image_meta['properties'])
def test_image_size(self):
volume = {'size': 10}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(10 * units.Gi, image_meta['size'])
def test_image_status(self):
volume = {}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual('active', image_meta['status'])
def test_values_conversion(self):
properties = {'min_ram': '5', 'min_disk': '7'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(5, image_meta['min_ram'])
self.assertEqual(7, image_meta['min_disk'])
def test_suppress_not_image_properties(self):
properties = {'min_ram': '256', 'min_disk': '128', 'image_id':
'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',
'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual({}, image_meta['properties'])
self.assertEqual(0, image_meta['size'])
self.assertNotEqual({}, properties)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BlockDeviceTestCase(test.NoDBTestCase):
<|reserved_special_token_0|>
def test_properties(self):
root_device0 = '/dev/sda'
root_device1 = '/dev/sdb'
mappings = [{'virtual': 'root', 'device': root_device0}]
properties0 = {'mappings': mappings}
properties1 = {'mappings': mappings, 'root_device_name': root_device1}
self.assertIsNone(block_device.properties_root_device_name({}))
self.assertEqual(root_device0, block_device.
properties_root_device_name(properties0))
self.assertEqual(root_device1, block_device.
properties_root_device_name(properties1))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_new_format_is_swap(self):
expected_results = [True, False, False, False, False]
for expected, bdm in zip(expected_results, self.new_mapping):
res = block_device.new_format_is_swap(bdm)
self.assertEqual(expected, res)
<|reserved_special_token_0|>
def test_validate_device_name(self):
for value in [' ', 10, None, 'a' * 260]:
self.assertRaises(exception.InvalidBDMFormat, block_device.
validate_device_name, value)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TestBlockDeviceDict(test.NoDBTestCase):
def setUp(self):
super(TestBlockDeviceDict, self).setUp()
BDM = block_device.BlockDeviceDict
self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}, {'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}, {'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume', 'uuid':
'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'source_type':
'snapshot', 'destination_type': 'volume', 'uuid':
'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,
'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}", 'boot_index': 0}), BDM({'id': 4,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',
'source_type': 'snapshot', 'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',
'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.
instance, 'no_device': True, 'device_name': '/dev/vdc'})]
self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'delete_on_termination': True,
'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sdc1', 'delete_on_termination':
True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda1', 'volume_id':
'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}"}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'connection_info':
"{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id':
5, 'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'source_type':
'image', 'destination_type': 'volume', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3',
'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sda4', 'source_type': 'image',
'destination_type': 'local', 'connection_info':
"{'fake': 'connection_info'}", 'image_id': 'fake-image-id-2',
'boot_index': -1})]
self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3'}]
def test_init(self):
def fake_validate(obj, dct):
pass
self.stub_out('nova.block_device.BlockDeviceDict._fields', set([
'field1', 'field2']))
self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',
set(['db_field1', 'db_field2']))
self.stub_out('nova.block_device.BlockDeviceDict._validate',
fake_validate)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':
'bar', 'db_field1': 'baz'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},
do_not_default=set(['field2']))
self.assertIn('field1', dev_dict)
self.assertNotIn('field2', dev_dict)
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict(field1='foo')
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'
)
self.assertEqual('foo', dev_dict['field1'])
self.assertEqual('bar', dev_dict['field2'])
def test_init_prepend_dev_to_device_name(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vda', bdm_dict['device_name'])
bdm['device_name'] = '/dev/vdb'
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vdb', bdm_dict['device_name'])
bdm['device_name'] = None
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertIsNone(bdm_dict['device_name'])
def test_init_boolify_delete_on_termination(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertFalse(bdm_dict['delete_on_termination'])
def test_validate(self):
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, {'bogus_field': 'lame_val'})
lame_bdm = dict(self.new_mapping[2])
del lame_bdm['source_type']
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_bdm)
lame_bdm['no_device'] = True
block_device.BlockDeviceDict(lame_bdm)
lame_dev_bdm = dict(self.new_mapping[2])
lame_dev_bdm['device_name'] = 'not a valid name'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
lame_dev_bdm['device_name'] = ''
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
cool_volume_size_bdm = dict(self.new_mapping[2])
cool_volume_size_bdm['volume_size'] = '42'
cool_volume_size_bdm = block_device.BlockDeviceDict(
cool_volume_size_bdm)
self.assertEqual(42, cool_volume_size_bdm['volume_size'])
lame_volume_size_bdm = dict(self.new_mapping[2])
lame_volume_size_bdm['volume_size'] = 'some_non_int_string'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_volume_size_bdm)
truthy_bdm = dict(self.new_mapping[2])
truthy_bdm['delete_on_termination'] = '1'
truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)
self.assertTrue(truthy_bdm['delete_on_termination'])
verbose_bdm = dict(self.new_mapping[2])
verbose_bdm['boot_index'] = 'first'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, verbose_bdm)
def test_from_legacy(self):
for legacy, new in zip(self.legacy_mapping, self.new_mapping):
self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy
), matchers.IsSubDictOf(new))
def test_from_legacy_mapping(self):
def _get_image_bdms(bdms):
return [bdm for bdm in bdms if bdm['source_type'] == 'image']
def _get_bootable_bdms(bdms):
return [bdm for bdm in bdms if bdm['boot_index'] is not None and
bdm['boot_index'] >= 0]
new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)
self.assertEqual(0, len(_get_image_bdms(new_no_img)))
for new, expected in zip(new_no_img, self.new_mapping):
self.assertThat(new, matchers.IsSubDictOf(expected))
new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref')
image_bdms = _get_image_bdms(new_with_img)
boot_bdms = _get_bootable_bdms(new_with_img)
self.assertEqual(1, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, image_bdms[0]['boot_index'])
self.assertEqual('image', boot_bdms[0]['source_type'])
new_with_img_and_root = block_device.from_legacy_mapping(self.
legacy_mapping, 'fake_image_ref', 'sda1')
image_bdms = _get_image_bdms(new_with_img_and_root)
boot_bdms = _get_bootable_bdms(new_with_img_and_root)
self.assertEqual(0, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, boot_bdms[0]['boot_index'])
self.assertEqual('volume', boot_bdms[0]['source_type'])
new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref', 'sda1', no_root=True)
self.assertEqual(0, len(_get_image_bdms(new_no_root)))
self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))
def test_from_api(self):
for api, new in zip(self.api_mapping, self.new_mapping):
new['connection_info'] = None
if new['snapshot_id']:
new['volume_id'] = None
self.assertThat(block_device.BlockDeviceDict.from_api(api,
False), matchers.IsSubDictOf(new))
def test_from_api_invalid_blank_id(self):
api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination':
True, 'boot_index': -1}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_invalid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1'}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_valid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': 0}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_valid_source_to_local_mapping_with_string_bi(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': '0'}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_invalid_image_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type':
'fake-lvm-1', 'boot_index': 1}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn('Mapping image to local is not supported', str(ex))
def test_from_api_invalid_volume_type_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying a volume_type with destination_type=local is not supported'
, str(ex))
def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(
self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying volume type to existing volume is not supported',
str(ex))
def test_image_mapping(self):
removed_fields = ['id', 'instance_uuid', 'connection_info',
'created_at', 'updated_at', 'deleted_at', 'deleted']
for bdm in self.new_mapping:
mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm
).get_image_mapping()
for fld in removed_fields:
self.assertNotIn(fld, mapping_bdm)
def _test_snapshot_from_bdm(self, template):
snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)
self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])
self.assertEqual('snapshot', snapshot['source_type'])
self.assertEqual('volume', snapshot['destination_type'])
self.assertEqual(template.volume_size, snapshot['volume_size'])
self.assertEqual(template.delete_on_termination, snapshot[
'delete_on_termination'])
self.assertEqual(template.device_name, snapshot['device_name'])
for key in ['disk_bus', 'device_type', 'boot_index']:
self.assertEqual(template[key], snapshot[key])
def test_snapshot_from_bdm(self):
for bdm in self.new_mapping:
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))
def test_snapshot_from_object(self):
for bdm in self.new_mapping[:-1]:
obj = objects.BlockDeviceMapping()
obj = objects.BlockDeviceMapping._from_db_object(None, obj,
fake_block_device.FakeDbBlockDeviceDict(bdm))
self._test_snapshot_from_bdm(obj)
class GetBDMImageMetadataTestCase(test.NoDBTestCase):
def setUp(self):
super().setUp()
self.compute_api = compute_api.API()
self.context = context.RequestContext('fake', 'fake')
def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},
'size': 0, 'status': 'active'}
def get_vol_data(*args, **kwargs):
return {'bootable': is_bootable}
with mock.patch.object(self.compute_api.volume_api, 'get',
side_effect=get_vol_data):
if not is_bootable:
self.assertRaises(exception.InvalidBDMVolumeNotBootable,
block_device.get_bdm_image_metadata, self.context, self
.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
else:
meta = block_device.get_bdm_image_metadata(self.context,
self.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(expected_meta, meta)
def test_get_bdm_image_metadata__non_bootable(self):
self._test_get_bdm_image_metadata__bootable(False)
def test_get_bdm_image_metadata__bootable(self):
self._test_get_bdm_image_metadata__bootable(True)
def test_get_bdm_image_metadata__basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
with mock.patch.object(self.compute_api.volume_api, 'get',
return_value=fake_volume):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
def test_get_bdm_image_metadata__snapshot_basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':
None, 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
fake_snapshot = {'volume_id': '1'}
with test.nested(mock.patch.object(self.compute_api.volume_api,
'get', return_value=fake_volume), mock.patch.object(self.
compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)
) as (volume_get, volume_get_snapshot):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
volume_get_snapshot.assert_called_once_with(self.context,
block_device_mapping[0]['snapshot_id'])
volume_get.assert_called_once_with(self.context, fake_snapshot[
'volume_id'])
@mock.patch.object(cinder.API, 'get', side_effect=exception.
CinderConnectionFailed(reason='error'))
def test_get_bdm_image_metadata__cinder_down(self, mock_get):
bdms = [objects.BlockDeviceMapping(**fake_block_device.
FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':
'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]
self.assertRaises(exception.CinderConnectionFailed, block_device.
get_bdm_image_metadata, self.context, self.compute_api.
image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)
class GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):
def test_inherit_image_properties(self):
properties = {'fake_prop': 'fake_value'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(properties, image_meta['properties'])
def test_image_size(self):
volume = {'size': 10}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(10 * units.Gi, image_meta['size'])
def test_image_status(self):
volume = {}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual('active', image_meta['status'])
def test_values_conversion(self):
properties = {'min_ram': '5', 'min_disk': '7'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(5, image_meta['min_ram'])
self.assertEqual(7, image_meta['min_disk'])
def test_suppress_not_image_properties(self):
properties = {'min_ram': '256', 'min_disk': '128', 'image_id':
'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',
'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual({}, image_meta['properties'])
self.assertEqual(0, image_meta['size'])
self.assertNotEqual({}, properties)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BlockDeviceTestCase(test.NoDBTestCase):
<|reserved_special_token_0|>
def test_properties(self):
root_device0 = '/dev/sda'
root_device1 = '/dev/sdb'
mappings = [{'virtual': 'root', 'device': root_device0}]
properties0 = {'mappings': mappings}
properties1 = {'mappings': mappings, 'root_device_name': root_device1}
self.assertIsNone(block_device.properties_root_device_name({}))
self.assertEqual(root_device0, block_device.
properties_root_device_name(properties0))
self.assertEqual(root_device1, block_device.
properties_root_device_name(properties1))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_strip_dev(self):
self.assertEqual('sda', block_device.strip_dev('/dev/sda'))
self.assertEqual('sda', block_device.strip_dev('sda'))
self.assertIsNone(block_device.strip_dev(None))
<|reserved_special_token_0|>
def test_get_device_letter(self):
self.assertEqual('', block_device.get_device_letter(''))
self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))
self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))
self.assertEqual('d', block_device.get_device_letter('/dev/d'))
self.assertEqual('a', block_device.get_device_letter('a'))
self.assertEqual('b', block_device.get_device_letter('sdb2'))
self.assertEqual('c', block_device.get_device_letter('vdc'))
self.assertEqual('c', block_device.get_device_letter('hdc'))
self.assertIsNone(block_device.get_device_letter(None))
def test_generate_device_name(self):
expected = ('vda', ('vd', 0)), ('vdaa', ('vd', 26)), ('vdabc', (
'vd', 730)), ('vdidpok', ('vd', 4194304)), ('sdc', ('sd', 2)), (
'sdaa', ('sd', 26)), ('sdiw', ('sd', 256)), ('hdzz', ('hd', 701))
for res, args in expected:
self.assertEqual(res, block_device.generate_device_name(*args))
<|reserved_special_token_0|>
def test_get_root_bdm(self):
root_bdm = {'device_name': 'vda', 'boot_index': 0}
bdms = [root_bdm, {'device_name': 'vdb', 'boot_index': 1}, {
'device_name': 'vdc', 'boot_index': -1}, {'device_name': 'vdd'}]
self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))
self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))
self.assertIsNone(block_device.get_root_bdm(bdms[1:]))
self.assertIsNone(block_device.get_root_bdm(bdms[2:]))
self.assertIsNone(block_device.get_root_bdm(bdms[3:]))
self.assertIsNone(block_device.get_root_bdm([]))
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test_get_bdm_local_disk_num(self):
size = block_device.get_bdm_local_disk_num(self.new_mapping)
self.assertEqual(2, size)
def test_new_format_is_swap(self):
expected_results = [True, False, False, False, False]
for expected, bdm in zip(expected_results, self.new_mapping):
res = block_device.new_format_is_swap(bdm)
self.assertEqual(expected, res)
<|reserved_special_token_0|>
def test_validate_device_name(self):
for value in [' ', 10, None, 'a' * 260]:
self.assertRaises(exception.InvalidBDMFormat, block_device.
validate_device_name, value)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TestBlockDeviceDict(test.NoDBTestCase):
def setUp(self):
super(TestBlockDeviceDict, self).setUp()
BDM = block_device.BlockDeviceDict
self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}, {'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}, {'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume', 'uuid':
'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'source_type':
'snapshot', 'destination_type': 'volume', 'uuid':
'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,
'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}", 'boot_index': 0}), BDM({'id': 4,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',
'source_type': 'snapshot', 'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',
'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.
instance, 'no_device': True, 'device_name': '/dev/vdc'})]
self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'delete_on_termination': True,
'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sdc1', 'delete_on_termination':
True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda1', 'volume_id':
'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}"}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'connection_info':
"{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id':
5, 'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'source_type':
'image', 'destination_type': 'volume', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3',
'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sda4', 'source_type': 'image',
'destination_type': 'local', 'connection_info':
"{'fake': 'connection_info'}", 'image_id': 'fake-image-id-2',
'boot_index': -1})]
self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3'}]
def test_init(self):
def fake_validate(obj, dct):
pass
self.stub_out('nova.block_device.BlockDeviceDict._fields', set([
'field1', 'field2']))
self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',
set(['db_field1', 'db_field2']))
self.stub_out('nova.block_device.BlockDeviceDict._validate',
fake_validate)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':
'bar', 'db_field1': 'baz'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},
do_not_default=set(['field2']))
self.assertIn('field1', dev_dict)
self.assertNotIn('field2', dev_dict)
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict(field1='foo')
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'
)
self.assertEqual('foo', dev_dict['field1'])
self.assertEqual('bar', dev_dict['field2'])
def test_init_prepend_dev_to_device_name(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vda', bdm_dict['device_name'])
bdm['device_name'] = '/dev/vdb'
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vdb', bdm_dict['device_name'])
bdm['device_name'] = None
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertIsNone(bdm_dict['device_name'])
def test_init_boolify_delete_on_termination(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertFalse(bdm_dict['delete_on_termination'])
def test_validate(self):
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, {'bogus_field': 'lame_val'})
lame_bdm = dict(self.new_mapping[2])
del lame_bdm['source_type']
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_bdm)
lame_bdm['no_device'] = True
block_device.BlockDeviceDict(lame_bdm)
lame_dev_bdm = dict(self.new_mapping[2])
lame_dev_bdm['device_name'] = 'not a valid name'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
lame_dev_bdm['device_name'] = ''
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
cool_volume_size_bdm = dict(self.new_mapping[2])
cool_volume_size_bdm['volume_size'] = '42'
cool_volume_size_bdm = block_device.BlockDeviceDict(
cool_volume_size_bdm)
self.assertEqual(42, cool_volume_size_bdm['volume_size'])
lame_volume_size_bdm = dict(self.new_mapping[2])
lame_volume_size_bdm['volume_size'] = 'some_non_int_string'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_volume_size_bdm)
truthy_bdm = dict(self.new_mapping[2])
truthy_bdm['delete_on_termination'] = '1'
truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)
self.assertTrue(truthy_bdm['delete_on_termination'])
verbose_bdm = dict(self.new_mapping[2])
verbose_bdm['boot_index'] = 'first'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, verbose_bdm)
def test_from_legacy(self):
for legacy, new in zip(self.legacy_mapping, self.new_mapping):
self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy
), matchers.IsSubDictOf(new))
def test_from_legacy_mapping(self):
def _get_image_bdms(bdms):
return [bdm for bdm in bdms if bdm['source_type'] == 'image']
def _get_bootable_bdms(bdms):
return [bdm for bdm in bdms if bdm['boot_index'] is not None and
bdm['boot_index'] >= 0]
new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)
self.assertEqual(0, len(_get_image_bdms(new_no_img)))
for new, expected in zip(new_no_img, self.new_mapping):
self.assertThat(new, matchers.IsSubDictOf(expected))
new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref')
image_bdms = _get_image_bdms(new_with_img)
boot_bdms = _get_bootable_bdms(new_with_img)
self.assertEqual(1, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, image_bdms[0]['boot_index'])
self.assertEqual('image', boot_bdms[0]['source_type'])
new_with_img_and_root = block_device.from_legacy_mapping(self.
legacy_mapping, 'fake_image_ref', 'sda1')
image_bdms = _get_image_bdms(new_with_img_and_root)
boot_bdms = _get_bootable_bdms(new_with_img_and_root)
self.assertEqual(0, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, boot_bdms[0]['boot_index'])
self.assertEqual('volume', boot_bdms[0]['source_type'])
new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref', 'sda1', no_root=True)
self.assertEqual(0, len(_get_image_bdms(new_no_root)))
self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))
def test_from_api(self):
for api, new in zip(self.api_mapping, self.new_mapping):
new['connection_info'] = None
if new['snapshot_id']:
new['volume_id'] = None
self.assertThat(block_device.BlockDeviceDict.from_api(api,
False), matchers.IsSubDictOf(new))
def test_from_api_invalid_blank_id(self):
api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination':
True, 'boot_index': -1}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_invalid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1'}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_valid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': 0}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_valid_source_to_local_mapping_with_string_bi(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': '0'}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_invalid_image_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type':
'fake-lvm-1', 'boot_index': 1}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn('Mapping image to local is not supported', str(ex))
def test_from_api_invalid_volume_type_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying a volume_type with destination_type=local is not supported'
, str(ex))
def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(
self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying volume type to existing volume is not supported',
str(ex))
def test_image_mapping(self):
removed_fields = ['id', 'instance_uuid', 'connection_info',
'created_at', 'updated_at', 'deleted_at', 'deleted']
for bdm in self.new_mapping:
mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm
).get_image_mapping()
for fld in removed_fields:
self.assertNotIn(fld, mapping_bdm)
def _test_snapshot_from_bdm(self, template):
snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)
self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])
self.assertEqual('snapshot', snapshot['source_type'])
self.assertEqual('volume', snapshot['destination_type'])
self.assertEqual(template.volume_size, snapshot['volume_size'])
self.assertEqual(template.delete_on_termination, snapshot[
'delete_on_termination'])
self.assertEqual(template.device_name, snapshot['device_name'])
for key in ['disk_bus', 'device_type', 'boot_index']:
self.assertEqual(template[key], snapshot[key])
def test_snapshot_from_bdm(self):
for bdm in self.new_mapping:
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))
def test_snapshot_from_object(self):
for bdm in self.new_mapping[:-1]:
obj = objects.BlockDeviceMapping()
obj = objects.BlockDeviceMapping._from_db_object(None, obj,
fake_block_device.FakeDbBlockDeviceDict(bdm))
self._test_snapshot_from_bdm(obj)
class GetBDMImageMetadataTestCase(test.NoDBTestCase):
def setUp(self):
super().setUp()
self.compute_api = compute_api.API()
self.context = context.RequestContext('fake', 'fake')
def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},
'size': 0, 'status': 'active'}
def get_vol_data(*args, **kwargs):
return {'bootable': is_bootable}
with mock.patch.object(self.compute_api.volume_api, 'get',
side_effect=get_vol_data):
if not is_bootable:
self.assertRaises(exception.InvalidBDMVolumeNotBootable,
block_device.get_bdm_image_metadata, self.context, self
.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
else:
meta = block_device.get_bdm_image_metadata(self.context,
self.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(expected_meta, meta)
def test_get_bdm_image_metadata__non_bootable(self):
self._test_get_bdm_image_metadata__bootable(False)
def test_get_bdm_image_metadata__bootable(self):
self._test_get_bdm_image_metadata__bootable(True)
def test_get_bdm_image_metadata__basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
with mock.patch.object(self.compute_api.volume_api, 'get',
return_value=fake_volume):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
def test_get_bdm_image_metadata__snapshot_basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':
None, 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
fake_snapshot = {'volume_id': '1'}
with test.nested(mock.patch.object(self.compute_api.volume_api,
'get', return_value=fake_volume), mock.patch.object(self.
compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)
) as (volume_get, volume_get_snapshot):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
volume_get_snapshot.assert_called_once_with(self.context,
block_device_mapping[0]['snapshot_id'])
volume_get.assert_called_once_with(self.context, fake_snapshot[
'volume_id'])
@mock.patch.object(cinder.API, 'get', side_effect=exception.
CinderConnectionFailed(reason='error'))
def test_get_bdm_image_metadata__cinder_down(self, mock_get):
bdms = [objects.BlockDeviceMapping(**fake_block_device.
FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':
'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]
self.assertRaises(exception.CinderConnectionFailed, block_device.
get_bdm_image_metadata, self.context, self.compute_api.
image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)
class GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):
def test_inherit_image_properties(self):
properties = {'fake_prop': 'fake_value'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(properties, image_meta['properties'])
def test_image_size(self):
volume = {'size': 10}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(10 * units.Gi, image_meta['size'])
def test_image_status(self):
volume = {}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual('active', image_meta['status'])
def test_values_conversion(self):
properties = {'min_ram': '5', 'min_disk': '7'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(5, image_meta['min_ram'])
self.assertEqual(7, image_meta['min_disk'])
def test_suppress_not_image_properties(self):
properties = {'min_ram': '256', 'min_disk': '128', 'image_id':
'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',
'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual({}, image_meta['properties'])
self.assertEqual(0, image_meta['size'])
self.assertNotEqual({}, properties)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class BlockDeviceTestCase(test.NoDBTestCase):
<|reserved_special_token_0|>
def test_properties(self):
root_device0 = '/dev/sda'
root_device1 = '/dev/sdb'
mappings = [{'virtual': 'root', 'device': root_device0}]
properties0 = {'mappings': mappings}
properties1 = {'mappings': mappings, 'root_device_name': root_device1}
self.assertIsNone(block_device.properties_root_device_name({}))
self.assertEqual(root_device0, block_device.
properties_root_device_name(properties0))
self.assertEqual(root_device1, block_device.
properties_root_device_name(properties1))
<|reserved_special_token_0|>
def test_mappings_prepend_dev(self):
mapping = [{'virtual': 'ami', 'device': '/dev/sda'}, {'virtual':
'root', 'device': 'sda'}, {'virtual': 'ephemeral0', 'device':
'sdb'}, {'virtual': 'swap', 'device': 'sdc'}, {'virtual':
'ephemeral1', 'device': 'sdd'}, {'virtual': 'ephemeral2',
'device': 'sde'}]
expected = [{'virtual': 'ami', 'device': '/dev/sda'}, {'virtual':
'root', 'device': 'sda'}, {'virtual': 'ephemeral0', 'device':
'/dev/sdb'}, {'virtual': 'swap', 'device': '/dev/sdc'}, {
'virtual': 'ephemeral1', 'device': '/dev/sdd'}, {'virtual':
'ephemeral2', 'device': '/dev/sde'}]
prepended = block_device.mappings_prepend_dev(mapping)
self.assertEqual(sorted(expected, key=lambda v: v['virtual']),
sorted(prepended, key=lambda v: v['virtual']))
def test_strip_dev(self):
self.assertEqual('sda', block_device.strip_dev('/dev/sda'))
self.assertEqual('sda', block_device.strip_dev('sda'))
self.assertIsNone(block_device.strip_dev(None))
def test_strip_prefix(self):
self.assertEqual('a', block_device.strip_prefix('/dev/sda'))
self.assertEqual('a', block_device.strip_prefix('a'))
self.assertEqual('a', block_device.strip_prefix('xvda'))
self.assertEqual('a', block_device.strip_prefix('vda'))
self.assertEqual('a', block_device.strip_prefix('hda'))
self.assertIsNone(block_device.strip_prefix(None))
def test_get_device_letter(self):
self.assertEqual('', block_device.get_device_letter(''))
self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))
self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))
self.assertEqual('d', block_device.get_device_letter('/dev/d'))
self.assertEqual('a', block_device.get_device_letter('a'))
self.assertEqual('b', block_device.get_device_letter('sdb2'))
self.assertEqual('c', block_device.get_device_letter('vdc'))
self.assertEqual('c', block_device.get_device_letter('hdc'))
self.assertIsNone(block_device.get_device_letter(None))
def test_generate_device_name(self):
expected = ('vda', ('vd', 0)), ('vdaa', ('vd', 26)), ('vdabc', (
'vd', 730)), ('vdidpok', ('vd', 4194304)), ('sdc', ('sd', 2)), (
'sdaa', ('sd', 26)), ('sdiw', ('sd', 256)), ('hdzz', ('hd', 701))
for res, args in expected:
self.assertEqual(res, block_device.generate_device_name(*args))
<|reserved_special_token_0|>
def test_get_root_bdm(self):
root_bdm = {'device_name': 'vda', 'boot_index': 0}
bdms = [root_bdm, {'device_name': 'vdb', 'boot_index': 1}, {
'device_name': 'vdc', 'boot_index': -1}, {'device_name': 'vdd'}]
self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))
self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))
self.assertIsNone(block_device.get_root_bdm(bdms[1:]))
self.assertIsNone(block_device.get_root_bdm(bdms[2:]))
self.assertIsNone(block_device.get_root_bdm(bdms[3:]))
self.assertIsNone(block_device.get_root_bdm([]))
def test_get_bdm_ephemeral_disk_size(self):
size = block_device.get_bdm_ephemeral_disk_size(self.new_mapping)
self.assertEqual(10, size)
<|reserved_special_token_0|>
def test_get_bdm_local_disk_num(self):
size = block_device.get_bdm_local_disk_num(self.new_mapping)
self.assertEqual(2, size)
def test_new_format_is_swap(self):
expected_results = [True, False, False, False, False]
for expected, bdm in zip(expected_results, self.new_mapping):
res = block_device.new_format_is_swap(bdm)
self.assertEqual(expected, res)
<|reserved_special_token_0|>
def test_validate_device_name(self):
for value in [' ', 10, None, 'a' * 260]:
self.assertRaises(exception.InvalidBDMFormat, block_device.
validate_device_name, value)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class TestBlockDeviceDict(test.NoDBTestCase):
def setUp(self):
super(TestBlockDeviceDict, self).setUp()
BDM = block_device.BlockDeviceDict
self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}, {'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}, {'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume', 'uuid':
'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'source_type':
'snapshot', 'destination_type': 'volume', 'uuid':
'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,
'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'source_type': 'blank',
'destination_type': 'local', 'delete_on_termination': True,
'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,
'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',
'source_type': 'blank', 'destination_type': 'local',
'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',
'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}", 'boot_index': 0}), BDM({'id': 4,
'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',
'source_type': 'snapshot', 'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',
'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.
instance, 'no_device': True, 'device_name': '/dev/vdc'})]
self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1', 'delete_on_termination': True,
'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sdc1', 'delete_on_termination':
True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda1', 'volume_id':
'fake-volume-id-1', 'connection_info':
"{'fake': 'connection_info'}"}, {'id': 4, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda2', 'connection_info':
"{'fake': 'connection_info'}", 'snapshot_id':
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id':
5, 'instance_uuid': uuids.instance, 'no_device': True,
'device_name': '/dev/vdc'}]
self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'source_type':
'image', 'destination_type': 'volume', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3',
'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.
instance, 'device_name': '/dev/sda4', 'source_type': 'image',
'destination_type': 'local', 'connection_info':
"{'fake': 'connection_info'}", 'image_id': 'fake-image-id-2',
'boot_index': -1})]
self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':
uuids.instance, 'device_name': '/dev/sda3', 'connection_info':
"{'fake': 'connection_info'}", 'volume_id': 'fake-volume-id-3'}]
def test_init(self):
def fake_validate(obj, dct):
pass
self.stub_out('nova.block_device.BlockDeviceDict._fields', set([
'field1', 'field2']))
self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',
set(['db_field1', 'db_field2']))
self.stub_out('nova.block_device.BlockDeviceDict._validate',
fake_validate)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':
'bar', 'db_field1': 'baz'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},
do_not_default=set(['field2']))
self.assertIn('field1', dev_dict)
self.assertNotIn('field2', dev_dict)
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
dev_dict = block_device.BlockDeviceDict(field1='foo')
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'
)
self.assertEqual('foo', dev_dict['field1'])
self.assertEqual('bar', dev_dict['field2'])
def test_init_prepend_dev_to_device_name(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vda', bdm_dict['device_name'])
bdm['device_name'] = '/dev/vdb'
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vdb', bdm_dict['device_name'])
bdm['device_name'] = None
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertIsNone(bdm_dict['device_name'])
def test_init_boolify_delete_on_termination(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':
'vda', 'source_type': 'volume', 'destination_type': 'volume',
'volume_id': 'fake-volume-id-1', 'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertFalse(bdm_dict['delete_on_termination'])
def test_validate(self):
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, {'bogus_field': 'lame_val'})
lame_bdm = dict(self.new_mapping[2])
del lame_bdm['source_type']
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_bdm)
lame_bdm['no_device'] = True
block_device.BlockDeviceDict(lame_bdm)
lame_dev_bdm = dict(self.new_mapping[2])
lame_dev_bdm['device_name'] = 'not a valid name'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
lame_dev_bdm['device_name'] = ''
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_dev_bdm)
cool_volume_size_bdm = dict(self.new_mapping[2])
cool_volume_size_bdm['volume_size'] = '42'
cool_volume_size_bdm = block_device.BlockDeviceDict(
cool_volume_size_bdm)
self.assertEqual(42, cool_volume_size_bdm['volume_size'])
lame_volume_size_bdm = dict(self.new_mapping[2])
lame_volume_size_bdm['volume_size'] = 'some_non_int_string'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, lame_volume_size_bdm)
truthy_bdm = dict(self.new_mapping[2])
truthy_bdm['delete_on_termination'] = '1'
truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)
self.assertTrue(truthy_bdm['delete_on_termination'])
verbose_bdm = dict(self.new_mapping[2])
verbose_bdm['boot_index'] = 'first'
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict, verbose_bdm)
def test_from_legacy(self):
for legacy, new in zip(self.legacy_mapping, self.new_mapping):
self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy
), matchers.IsSubDictOf(new))
def test_from_legacy_mapping(self):
def _get_image_bdms(bdms):
return [bdm for bdm in bdms if bdm['source_type'] == 'image']
def _get_bootable_bdms(bdms):
return [bdm for bdm in bdms if bdm['boot_index'] is not None and
bdm['boot_index'] >= 0]
new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)
self.assertEqual(0, len(_get_image_bdms(new_no_img)))
for new, expected in zip(new_no_img, self.new_mapping):
self.assertThat(new, matchers.IsSubDictOf(expected))
new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref')
image_bdms = _get_image_bdms(new_with_img)
boot_bdms = _get_bootable_bdms(new_with_img)
self.assertEqual(1, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, image_bdms[0]['boot_index'])
self.assertEqual('image', boot_bdms[0]['source_type'])
new_with_img_and_root = block_device.from_legacy_mapping(self.
legacy_mapping, 'fake_image_ref', 'sda1')
image_bdms = _get_image_bdms(new_with_img_and_root)
boot_bdms = _get_bootable_bdms(new_with_img_and_root)
self.assertEqual(0, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, boot_bdms[0]['boot_index'])
self.assertEqual('volume', boot_bdms[0]['source_type'])
new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,
'fake_image_ref', 'sda1', no_root=True)
self.assertEqual(0, len(_get_image_bdms(new_no_root)))
self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))
def test_from_api(self):
for api, new in zip(self.api_mapping, self.new_mapping):
new['connection_info'] = None
if new['snapshot_id']:
new['volume_id'] = None
self.assertThat(block_device.BlockDeviceDict.from_api(api,
False), matchers.IsSubDictOf(new))
def test_from_api_invalid_blank_id(self):
api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination':
True, 'boot_index': -1}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_invalid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1'}
self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
def test_from_api_valid_source_to_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': 0}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_valid_source_to_local_mapping_with_string_bi(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,
'boot_index': '0'}
retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':
'image', 'image_id': 1, 'destination_type': 'local',
'volume_id': 'fake-volume-id-1', 'boot_index': 0})
self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(
api_dict, True))
def test_from_api_invalid_image_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'image', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type':
'fake-lvm-1', 'boot_index': 1}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn('Mapping image to local is not supported', str(ex))
def test_from_api_invalid_volume_type_to_destination_local_mapping(self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying a volume_type with destination_type=local is not supported'
, str(ex))
def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(
self):
api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':
'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat, block_device.
BlockDeviceDict.from_api, api_dict, False)
self.assertIn(
'Specifying volume type to existing volume is not supported',
str(ex))
def test_image_mapping(self):
removed_fields = ['id', 'instance_uuid', 'connection_info',
'created_at', 'updated_at', 'deleted_at', 'deleted']
for bdm in self.new_mapping:
mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm
).get_image_mapping()
for fld in removed_fields:
self.assertNotIn(fld, mapping_bdm)
def _test_snapshot_from_bdm(self, template):
snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)
self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])
self.assertEqual('snapshot', snapshot['source_type'])
self.assertEqual('volume', snapshot['destination_type'])
self.assertEqual(template.volume_size, snapshot['volume_size'])
self.assertEqual(template.delete_on_termination, snapshot[
'delete_on_termination'])
self.assertEqual(template.device_name, snapshot['device_name'])
for key in ['disk_bus', 'device_type', 'boot_index']:
self.assertEqual(template[key], snapshot[key])
def test_snapshot_from_bdm(self):
for bdm in self.new_mapping:
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))
def test_snapshot_from_object(self):
for bdm in self.new_mapping[:-1]:
obj = objects.BlockDeviceMapping()
obj = objects.BlockDeviceMapping._from_db_object(None, obj,
fake_block_device.FakeDbBlockDeviceDict(bdm))
self._test_snapshot_from_bdm(obj)
class GetBDMImageMetadataTestCase(test.NoDBTestCase):
def setUp(self):
super().setUp()
self.compute_api = compute_api.API()
self.context = context.RequestContext('fake', 'fake')
def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},
'size': 0, 'status': 'active'}
def get_vol_data(*args, **kwargs):
return {'bootable': is_bootable}
with mock.patch.object(self.compute_api.volume_api, 'get',
side_effect=get_vol_data):
if not is_bootable:
self.assertRaises(exception.InvalidBDMVolumeNotBootable,
block_device.get_bdm_image_metadata, self.context, self
.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
else:
meta = block_device.get_bdm_image_metadata(self.context,
self.compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(expected_meta, meta)
def test_get_bdm_image_metadata__non_bootable(self):
self._test_get_bdm_image_metadata__bootable(False)
def test_get_bdm_image_metadata__bootable(self):
self._test_get_bdm_image_metadata__bootable(True)
def test_get_bdm_image_metadata__basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':
'1', 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
with mock.patch.object(self.compute_api.volume_api, 'get',
return_value=fake_volume):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
def test_get_bdm_image_metadata__snapshot_basic_property(self):
block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':
None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':
None, 'delete_on_termination': False}]
fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':
128, 'foo': 'bar'}}
fake_snapshot = {'volume_id': '1'}
with test.nested(mock.patch.object(self.compute_api.volume_api,
'get', return_value=fake_volume), mock.patch.object(self.
compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)
) as (volume_get, volume_get_snapshot):
meta = block_device.get_bdm_image_metadata(self.context, self.
compute_api.image_api, self.compute_api.volume_api,
block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
volume_get_snapshot.assert_called_once_with(self.context,
block_device_mapping[0]['snapshot_id'])
volume_get.assert_called_once_with(self.context, fake_snapshot[
'volume_id'])
@mock.patch.object(cinder.API, 'get', side_effect=exception.
CinderConnectionFailed(reason='error'))
def test_get_bdm_image_metadata__cinder_down(self, mock_get):
bdms = [objects.BlockDeviceMapping(**fake_block_device.
FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':
'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]
self.assertRaises(exception.CinderConnectionFailed, block_device.
get_bdm_image_metadata, self.context, self.compute_api.
image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)
class GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):
def test_inherit_image_properties(self):
properties = {'fake_prop': 'fake_value'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(properties, image_meta['properties'])
def test_image_size(self):
volume = {'size': 10}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(10 * units.Gi, image_meta['size'])
def test_image_status(self):
volume = {}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual('active', image_meta['status'])
def test_values_conversion(self):
properties = {'min_ram': '5', 'min_disk': '7'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(5, image_meta['min_ram'])
self.assertEqual(7, image_meta['min_disk'])
def test_suppress_not_image_properties(self):
properties = {'min_ram': '256', 'min_disk': '128', 'image_id':
'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',
'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual({}, image_meta['properties'])
self.assertEqual(0, image_meta['size'])
self.assertNotEqual({}, properties)
<|reserved_special_token_1|>
# Copyright 2011 Isaku Yamahata
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for Block Device utility functions.
"""
from unittest import mock
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import units
from nova import block_device
from nova.compute import api as compute_api
from nova import context
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import matchers
from nova.volume import cinder
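# The classes below cover three areas of nova.block_device: the standalone
# device-name and mapping helper functions (BlockDeviceTestCase), the
# BlockDeviceDict container with its legacy/API conversions
# (TestBlockDeviceDict), and the image-metadata paths used for volume-backed
# instances (GetBDMImageMetadataTestCase, GetImageMetadataFromVolumeTestCase).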
class BlockDeviceTestCase(test.NoDBTestCase):
def setUp(self):
super(BlockDeviceTestCase, self).setUp()
BDM = block_device.BlockDeviceDict
self.new_mapping = [
BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'volume_size': 1,
'guest_format': 'swap',
'boot_index': -1}),
BDM({'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'source_type': 'blank',
'destination_type': 'local',
'volume_size': 10,
'delete_on_termination': True,
'boot_index': -1}),
BDM({'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'connection_info': "{'fake': 'connection_info'}",
'boot_index': 0}),
BDM({'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'volume_id': 'fake-volume-id-2',
'boot_index': -1}),
BDM({'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'}),
]
def test_properties(self):
root_device0 = '/dev/sda'
root_device1 = '/dev/sdb'
mappings = [{'virtual': 'root',
'device': root_device0}]
properties0 = {'mappings': mappings}
properties1 = {'mappings': mappings,
'root_device_name': root_device1}
self.assertIsNone(block_device.properties_root_device_name({}))
self.assertEqual(root_device0,
block_device.properties_root_device_name(properties0))
self.assertEqual(root_device1,
block_device.properties_root_device_name(properties1))
def test_ephemeral(self):
self.assertFalse(block_device.is_ephemeral('ephemeral'))
self.assertTrue(block_device.is_ephemeral('ephemeral0'))
self.assertTrue(block_device.is_ephemeral('ephemeral1'))
self.assertTrue(block_device.is_ephemeral('ephemeral11'))
self.assertFalse(block_device.is_ephemeral('root'))
self.assertFalse(block_device.is_ephemeral('swap'))
self.assertFalse(block_device.is_ephemeral('/dev/sda1'))
self.assertEqual(0, block_device.ephemeral_num('ephemeral0'))
self.assertEqual(1, block_device.ephemeral_num('ephemeral1'))
self.assertEqual(11, block_device.ephemeral_num('ephemeral11'))
self.assertFalse(block_device.is_swap_or_ephemeral('ephemeral'))
self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral0'))
self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral1'))
self.assertTrue(block_device.is_swap_or_ephemeral('swap'))
self.assertFalse(block_device.is_swap_or_ephemeral('root'))
self.assertFalse(block_device.is_swap_or_ephemeral('/dev/sda1'))
def test_mappings_prepend_dev(self):
mapping = [
{'virtual': 'ami', 'device': '/dev/sda'},
{'virtual': 'root', 'device': 'sda'},
{'virtual': 'ephemeral0', 'device': 'sdb'},
{'virtual': 'swap', 'device': 'sdc'},
{'virtual': 'ephemeral1', 'device': 'sdd'},
{'virtual': 'ephemeral2', 'device': 'sde'}]
expected = [
{'virtual': 'ami', 'device': '/dev/sda'},
{'virtual': 'root', 'device': 'sda'},
{'virtual': 'ephemeral0', 'device': '/dev/sdb'},
{'virtual': 'swap', 'device': '/dev/sdc'},
{'virtual': 'ephemeral1', 'device': '/dev/sdd'},
{'virtual': 'ephemeral2', 'device': '/dev/sde'}]
prepended = block_device.mappings_prepend_dev(mapping)
self.assertEqual(sorted(expected, key=lambda v: v['virtual']),
sorted(prepended, key=lambda v: v['virtual']))
def test_strip_dev(self):
self.assertEqual('sda', block_device.strip_dev('/dev/sda'))
self.assertEqual('sda', block_device.strip_dev('sda'))
self.assertIsNone(block_device.strip_dev(None))
def test_strip_prefix(self):
self.assertEqual('a', block_device.strip_prefix('/dev/sda'))
self.assertEqual('a', block_device.strip_prefix('a'))
self.assertEqual('a', block_device.strip_prefix('xvda'))
self.assertEqual('a', block_device.strip_prefix('vda'))
self.assertEqual('a', block_device.strip_prefix('hda'))
self.assertIsNone(block_device.strip_prefix(None))
def test_get_device_letter(self):
self.assertEqual('', block_device.get_device_letter(''))
self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))
self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))
self.assertEqual('d', block_device.get_device_letter('/dev/d'))
self.assertEqual('a', block_device.get_device_letter('a'))
self.assertEqual('b', block_device.get_device_letter('sdb2'))
self.assertEqual('c', block_device.get_device_letter('vdc'))
self.assertEqual('c', block_device.get_device_letter('hdc'))
self.assertIsNone(block_device.get_device_letter(None))
def test_generate_device_name(self):
expected = (
('vda', ("vd", 0)),
('vdaa', ("vd", 26)),
('vdabc', ("vd", 730)),
('vdidpok', ("vd", 4194304)),
('sdc', ("sd", 2)),
('sdaa', ("sd", 26)),
('sdiw', ("sd", 256)),
('hdzz', ("hd", 701))
)
for res, args in expected:
self.assertEqual(res, block_device.generate_device_name(*args))
def test_volume_in_mapping(self):
swap = {'device_name': '/dev/sdb',
'swap_size': 1}
ephemerals = [{'num': 0,
'virtual_name': 'ephemeral0',
'device_name': '/dev/sdc1',
'size': 1},
{'num': 2,
'virtual_name': 'ephemeral2',
'device_name': '/dev/sdd',
'size': 1}]
block_device_mapping = [{'mount_device': '/dev/sde',
'device_path': 'fake_device'},
{'mount_device': '/dev/sdf',
'device_path': 'fake_device'}]
block_device_info = {
'root_device_name': '/dev/sda',
'swap': swap,
'ephemerals': ephemerals,
'block_device_mapping': block_device_mapping}
def _assert_volume_in_mapping(device_name, true_or_false):
in_mapping = block_device.volume_in_mapping(
device_name, block_device_info)
self.assertEqual(true_or_false, in_mapping)
_assert_volume_in_mapping('sda', False)
_assert_volume_in_mapping('sdb', True)
_assert_volume_in_mapping('sdc1', True)
_assert_volume_in_mapping('sdd', True)
_assert_volume_in_mapping('sde', True)
_assert_volume_in_mapping('sdf', True)
_assert_volume_in_mapping('sdg', False)
_assert_volume_in_mapping('sdh1', False)
def test_get_root_bdm(self):
root_bdm = {'device_name': 'vda', 'boot_index': 0}
bdms = [root_bdm,
{'device_name': 'vdb', 'boot_index': 1},
{'device_name': 'vdc', 'boot_index': -1},
{'device_name': 'vdd'}]
self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))
self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))
self.assertIsNone(block_device.get_root_bdm(bdms[1:]))
self.assertIsNone(block_device.get_root_bdm(bdms[2:]))
self.assertIsNone(block_device.get_root_bdm(bdms[3:]))
self.assertIsNone(block_device.get_root_bdm([]))
def test_get_bdm_ephemeral_disk_size(self):
size = block_device.get_bdm_ephemeral_disk_size(self.new_mapping)
self.assertEqual(10, size)
def test_get_bdm_swap_list(self):
swap_list = block_device.get_bdm_swap_list(self.new_mapping)
self.assertEqual(1, len(swap_list))
self.assertEqual(1, swap_list[0].get('id'))
def test_get_bdm_local_disk_num(self):
size = block_device.get_bdm_local_disk_num(self.new_mapping)
self.assertEqual(2, size)
def test_new_format_is_swap(self):
expected_results = [True, False, False, False, False]
for expected, bdm in zip(expected_results, self.new_mapping):
res = block_device.new_format_is_swap(bdm)
self.assertEqual(expected, res)
def test_new_format_is_ephemeral(self):
expected_results = [False, True, False, False, False]
for expected, bdm in zip(expected_results, self.new_mapping):
res = block_device.new_format_is_ephemeral(bdm)
self.assertEqual(expected, res)
def test_validate_device_name(self):
for value in [' ', 10, None, 'a' * 260]:
self.assertRaises(exception.InvalidBDMFormat,
block_device.validate_device_name,
value)
def test_validate_and_default_volume_size(self):
bdm = {}
for value in [-1, 'a', 2.5]:
bdm['volume_size'] = value
self.assertRaises(exception.InvalidBDMFormat,
block_device.validate_and_default_volume_size,
bdm)
def test_get_bdms_to_connect(self):
root_bdm = {'device_name': 'vda', 'boot_index': 0}
bdms = [root_bdm,
{'device_name': 'vdb', 'boot_index': 1},
{'device_name': 'vdc', 'boot_index': -1},
{'device_name': 'vde', 'boot_index': None},
{'device_name': 'vdd'}]
self.assertNotIn(root_bdm, block_device.get_bdms_to_connect(bdms,
exclude_root_mapping=True))
self.assertIn(root_bdm, block_device.get_bdms_to_connect(bdms))
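# TestBlockDeviceDict builds the same block-device mappings in three
# representations (API input, new BlockDeviceDict format, legacy format) and
# verifies validation plus the conversions between them.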
class TestBlockDeviceDict(test.NoDBTestCase):
def setUp(self):
super(TestBlockDeviceDict, self).setUp()
BDM = block_device.BlockDeviceDict
self.api_mapping = [
{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': 'swap',
'boot_index': -1},
{'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'boot_index': -1},
{'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'source_type': 'volume',
'destination_type': 'volume',
'uuid': 'fake-volume-id-1',
'boot_index': 0},
{'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'uuid': 'fake-snapshot-id-1',
'boot_index': -1},
{'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'},
]
self.new_mapping = [
BDM({'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': 'swap',
'boot_index': -1}),
BDM({'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'boot_index': -1}),
BDM({'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'connection_info': "{'fake': 'connection_info'}",
'boot_index': 0}),
BDM({'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'source_type': 'snapshot',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'volume_id': 'fake-volume-id-2',
'boot_index': -1}),
BDM({'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'}),
]
self.legacy_mapping = [
{'id': 1, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdb1',
'delete_on_termination': True,
'virtual_name': 'swap'},
{'id': 2, 'instance_uuid': uuids.instance,
'device_name': '/dev/sdc1',
'delete_on_termination': True,
'virtual_name': 'ephemeral0'},
{'id': 3, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda1',
'volume_id': 'fake-volume-id-1',
'connection_info': "{'fake': 'connection_info'}"},
{'id': 4, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda2',
'connection_info': "{'fake': 'connection_info'}",
'snapshot_id': 'fake-snapshot-id-1',
'volume_id': 'fake-volume-id-2'},
{'id': 5, 'instance_uuid': uuids.instance,
'no_device': True,
'device_name': '/dev/vdc'},
]
self.new_mapping_source_image = [
BDM({'id': 6, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda3',
'source_type': 'image',
'destination_type': 'volume',
'connection_info': "{'fake': 'connection_info'}",
'volume_id': 'fake-volume-id-3',
'boot_index': -1}),
BDM({'id': 7, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda4',
'source_type': 'image',
'destination_type': 'local',
'connection_info': "{'fake': 'connection_info'}",
'image_id': 'fake-image-id-2',
'boot_index': -1}),
]
self.legacy_mapping_source_image = [
{'id': 6, 'instance_uuid': uuids.instance,
'device_name': '/dev/sda3',
'connection_info': "{'fake': 'connection_info'}",
'volume_id': 'fake-volume-id-3'},
]
def test_init(self):
def fake_validate(obj, dct):
pass
self.stub_out('nova.block_device.BlockDeviceDict._fields',
set(['field1', 'field2']))
self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',
set(['db_field1', 'db_field2']))
self.stub_out('nova.block_device.BlockDeviceDict._validate',
fake_validate)
# Make sure db fields are not picked up if they are not
# in the original dict
dev_dict = block_device.BlockDeviceDict({'field1': 'foo',
'field2': 'bar',
'db_field1': 'baz'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
# Make sure all expected fields are defaulted
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
# Unless they are not meant to be
dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},
do_not_default=set(['field2']))
self.assertIn('field1', dev_dict)
self.assertNotIn('field2', dev_dict)
self.assertNotIn('db_field1', dev_dict)
self.assertNotIn('db_field2', dev_dict)
# Passing kwargs to constructor works
dev_dict = block_device.BlockDeviceDict(field1='foo')
self.assertIn('field1', dev_dict)
self.assertIn('field2', dev_dict)
self.assertIsNone(dev_dict['field2'])
dev_dict = block_device.BlockDeviceDict(
{'field1': 'foo'}, field2='bar')
self.assertEqual('foo', dev_dict['field1'])
self.assertEqual('bar', dev_dict['field2'])
def test_init_prepend_dev_to_device_name(self):
bdm = {'id': 3, 'instance_uuid': uuids.instance,
'device_name': 'vda',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vda', bdm_dict['device_name'])
bdm['device_name'] = '/dev/vdb'
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertEqual('/dev/vdb', bdm_dict['device_name'])
bdm['device_name'] = None
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertIsNone(bdm_dict['device_name'])
def test_init_boolify_delete_on_termination(self):
# Make sure that when delete_on_termination is not passed it's
# still set to False and not None
bdm = {'id': 3, 'instance_uuid': uuids.instance,
'device_name': 'vda',
'source_type': 'volume',
'destination_type': 'volume',
'volume_id': 'fake-volume-id-1',
'boot_index': 0}
bdm_dict = block_device.BlockDeviceDict(bdm)
self.assertFalse(bdm_dict['delete_on_termination'])
def test_validate(self):
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
{'bogus_field': 'lame_val'})
lame_bdm = dict(self.new_mapping[2])
del lame_bdm['source_type']
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_bdm)
lame_bdm['no_device'] = True
block_device.BlockDeviceDict(lame_bdm)
lame_dev_bdm = dict(self.new_mapping[2])
lame_dev_bdm['device_name'] = "not a valid name"
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_dev_bdm)
lame_dev_bdm['device_name'] = ""
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_dev_bdm)
cool_volume_size_bdm = dict(self.new_mapping[2])
cool_volume_size_bdm['volume_size'] = '42'
cool_volume_size_bdm = block_device.BlockDeviceDict(
cool_volume_size_bdm)
self.assertEqual(42, cool_volume_size_bdm['volume_size'])
lame_volume_size_bdm = dict(self.new_mapping[2])
lame_volume_size_bdm['volume_size'] = 'some_non_int_string'
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
lame_volume_size_bdm)
truthy_bdm = dict(self.new_mapping[2])
truthy_bdm['delete_on_termination'] = '1'
truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)
self.assertTrue(truthy_bdm['delete_on_termination'])
verbose_bdm = dict(self.new_mapping[2])
verbose_bdm['boot_index'] = 'first'
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict,
verbose_bdm)
def test_from_legacy(self):
for legacy, new in zip(self.legacy_mapping, self.new_mapping):
self.assertThat(
block_device.BlockDeviceDict.from_legacy(legacy),
matchers.IsSubDictOf(new))
def test_from_legacy_mapping(self):
def _get_image_bdms(bdms):
return [bdm for bdm in bdms if bdm['source_type'] == 'image']
def _get_bootable_bdms(bdms):
return [bdm for bdm in bdms
if (bdm['boot_index'] is not None and
bdm['boot_index'] >= 0)]
new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)
self.assertEqual(0, len(_get_image_bdms(new_no_img)))
for new, expected in zip(new_no_img, self.new_mapping):
self.assertThat(new, matchers.IsSubDictOf(expected))
new_with_img = block_device.from_legacy_mapping(
self.legacy_mapping, 'fake_image_ref')
image_bdms = _get_image_bdms(new_with_img)
boot_bdms = _get_bootable_bdms(new_with_img)
self.assertEqual(1, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, image_bdms[0]['boot_index'])
self.assertEqual('image', boot_bdms[0]['source_type'])
new_with_img_and_root = block_device.from_legacy_mapping(
self.legacy_mapping, 'fake_image_ref', 'sda1')
image_bdms = _get_image_bdms(new_with_img_and_root)
boot_bdms = _get_bootable_bdms(new_with_img_and_root)
self.assertEqual(0, len(image_bdms))
self.assertEqual(1, len(boot_bdms))
self.assertEqual(0, boot_bdms[0]['boot_index'])
self.assertEqual('volume', boot_bdms[0]['source_type'])
new_no_root = block_device.from_legacy_mapping(
self.legacy_mapping, 'fake_image_ref', 'sda1', no_root=True)
self.assertEqual(0, len(_get_image_bdms(new_no_root)))
self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))
def test_from_api(self):
for api, new in zip(self.api_mapping, self.new_mapping):
new['connection_info'] = None
if new['snapshot_id']:
new['volume_id'] = None
self.assertThat(
block_device.BlockDeviceDict.from_api(api, False),
matchers.IsSubDictOf(new))
def test_from_api_invalid_blank_id(self):
api_dict = {'id': 1,
'source_type': 'blank',
'destination_type': 'volume',
'uuid': 'fake-volume-id-1',
'delete_on_termination': True,
'boot_index': -1}
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api, api_dict,
False)
def test_from_api_invalid_source_to_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'uuid': 'fake-volume-id-1'}
self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api, api_dict,
False)
def test_from_api_valid_source_to_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'uuid': 1,
'boot_index': 0}
retexp = block_device.BlockDeviceDict(
{'id': 1,
'source_type': 'image',
'image_id': 1,
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'boot_index': 0})
self.assertEqual(retexp,
block_device.BlockDeviceDict.from_api(api_dict, True))
def test_from_api_valid_source_to_local_mapping_with_string_bi(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'uuid': 1,
'boot_index': '0'}
retexp = block_device.BlockDeviceDict(
{'id': 1,
'source_type': 'image',
'image_id': 1,
'destination_type': 'local',
'volume_id': 'fake-volume-id-1',
'boot_index': 0})
self.assertEqual(retexp,
block_device.BlockDeviceDict.from_api(api_dict, True))
def test_from_api_invalid_image_to_destination_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'image',
'destination_type': 'local',
'uuid': 'fake-volume-id-1',
'volume_type': 'fake-lvm-1',
'boot_index': 1}
ex = self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api,
api_dict, False)
self.assertIn('Mapping image to local is not supported', str(ex))
def test_from_api_invalid_volume_type_to_destination_local_mapping(self):
api_dict = {'id': 1,
'source_type': 'volume',
'destination_type': 'local',
'uuid': 'fake-volume-id-1',
'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api,
api_dict, False)
self.assertIn('Specifying a volume_type with destination_type=local '
'is not supported', str(ex))
def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(
self):
api_dict = {'id': 1,
'source_type': 'volume',
'destination_type': 'volume',
'uuid': 'fake-volume-id-1',
'volume_type': 'fake-lvm-1'}
ex = self.assertRaises(exception.InvalidBDMFormat,
block_device.BlockDeviceDict.from_api,
api_dict, False)
self.assertIn('Specifying volume type to existing volume is '
'not supported', str(ex))
def test_image_mapping(self):
removed_fields = ['id', 'instance_uuid', 'connection_info',
'created_at', 'updated_at', 'deleted_at', 'deleted']
for bdm in self.new_mapping:
mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(
bdm).get_image_mapping()
for fld in removed_fields:
self.assertNotIn(fld, mapping_bdm)
def _test_snapshot_from_bdm(self, template):
snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)
self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])
self.assertEqual('snapshot', snapshot['source_type'])
self.assertEqual('volume', snapshot['destination_type'])
self.assertEqual(template.volume_size, snapshot['volume_size'])
self.assertEqual(template.delete_on_termination,
snapshot['delete_on_termination'])
self.assertEqual(template.device_name, snapshot['device_name'])
for key in ['disk_bus', 'device_type', 'boot_index']:
self.assertEqual(template[key], snapshot[key])
def test_snapshot_from_bdm(self):
for bdm in self.new_mapping:
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))
def test_snapshot_from_object(self):
for bdm in self.new_mapping[:-1]:
obj = objects.BlockDeviceMapping()
obj = objects.BlockDeviceMapping._from_db_object(
None, obj, fake_block_device.FakeDbBlockDeviceDict(
bdm))
self._test_snapshot_from_bdm(obj)
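# GetBDMImageMetadataTestCase mocks the volume API to check that image
# metadata is derived from a boot volume or its source snapshot, and that
# non-bootable volumes and Cinder connection failures raise the expected
# exceptions.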
class GetBDMImageMetadataTestCase(test.NoDBTestCase):
def setUp(self):
super().setUp()
self.compute_api = compute_api.API()
self.context = context.RequestContext('fake', 'fake')
def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': None,
'volume_id': '1',
'delete_on_termination': False,
}]
expected_meta = {
'min_disk': 0, 'min_ram': 0, 'properties': {}, 'size': 0,
'status': 'active',
}
def get_vol_data(*args, **kwargs):
return {'bootable': is_bootable}
with mock.patch.object(
self.compute_api.volume_api, 'get', side_effect=get_vol_data,
):
if not is_bootable:
self.assertRaises(
exception.InvalidBDMVolumeNotBootable,
block_device.get_bdm_image_metadata,
self.context,
self.compute_api.image_api,
self.compute_api.volume_api,
block_device_mapping)
else:
meta = block_device.get_bdm_image_metadata(
self.context, self.compute_api.image_api,
self.compute_api.volume_api, block_device_mapping)
self.assertEqual(expected_meta, meta)
def test_get_bdm_image_metadata__non_bootable(self):
self._test_get_bdm_image_metadata__bootable(False)
def test_get_bdm_image_metadata__bootable(self):
self._test_get_bdm_image_metadata__bootable(True)
def test_get_bdm_image_metadata__basic_property(self):
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': None,
'volume_id': '1',
'delete_on_termination': False,
}]
fake_volume = {
'volume_image_metadata': {
'min_ram': 256, 'min_disk': 128, 'foo': 'bar',
},
}
with mock.patch.object(
self.compute_api.volume_api, 'get', return_value=fake_volume,
):
meta = block_device.get_bdm_image_metadata(
self.context, self.compute_api.image_api,
self.compute_api.volume_api, block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
def test_get_bdm_image_metadata__snapshot_basic_property(self):
block_device_mapping = [{
'id': 1,
'device_name': 'vda',
'no_device': None,
'virtual_name': None,
'snapshot_id': '2',
'volume_id': None,
'delete_on_termination': False,
}]
fake_volume = {
'volume_image_metadata': {
'min_ram': 256, 'min_disk': 128, 'foo': 'bar',
},
}
fake_snapshot = {'volume_id': '1'}
with test.nested(
mock.patch.object(
self.compute_api.volume_api, 'get',
return_value=fake_volume),
mock.patch.object(
self.compute_api.volume_api, 'get_snapshot',
return_value=fake_snapshot),
) as (volume_get, volume_get_snapshot):
meta = block_device.get_bdm_image_metadata(
self.context, self.compute_api.image_api,
self.compute_api.volume_api, block_device_mapping)
self.assertEqual(256, meta['min_ram'])
self.assertEqual(128, meta['min_disk'])
self.assertEqual('active', meta['status'])
self.assertEqual('bar', meta['properties']['foo'])
volume_get_snapshot.assert_called_once_with(
self.context, block_device_mapping[0]['snapshot_id'])
volume_get.assert_called_once_with(
self.context, fake_snapshot['volume_id'])
@mock.patch.object(
cinder.API, 'get',
side_effect=exception.CinderConnectionFailed(reason='error'))
def test_get_bdm_image_metadata__cinder_down(self, mock_get):
bdms = [
objects.BlockDeviceMapping(
**fake_block_device.FakeDbBlockDeviceDict({
'id': 1,
'volume_id': 1,
'source_type': 'volume',
'destination_type': 'volume',
'device_name': 'vda',
})
)
]
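        # The Cinder failure must propagate rather than being swallowed while
        # the image metadata is being assembled.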
self.assertRaises(
exception.CinderConnectionFailed,
block_device.get_bdm_image_metadata,
self.context,
self.compute_api.image_api,
self.compute_api.volume_api,
bdms, legacy_bdm=True)


class GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):
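    """Tests for block_device.get_image_metadata_from_volume()."""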
def test_inherit_image_properties(self):
properties = {'fake_prop': 'fake_value'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(properties, image_meta['properties'])
def test_image_size(self):
volume = {'size': 10}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(10 * units.Gi, image_meta['size'])
def test_image_status(self):
volume = {}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual('active', image_meta['status'])
def test_values_conversion(self):
properties = {'min_ram': '5', 'min_disk': '7'}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual(5, image_meta['min_ram'])
self.assertEqual(7, image_meta['min_disk'])
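
    # Glance-internal keys (image_id, image_name, size, checksum, ...) must not
    # leak into the properties derived from volume_image_metadata.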
def test_suppress_not_image_properties(self):
properties = {
'min_ram': '256', 'min_disk': '128', 'image_id': 'fake_id',
'image_name': 'fake_name', 'container_format': 'ami',
'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum',
}
volume = {'volume_image_metadata': properties}
image_meta = block_device.get_image_metadata_from_volume(volume)
self.assertEqual({}, image_meta['properties'])
self.assertEqual(0, image_meta['size'])
# volume's properties should not be touched
self.assertNotEqual({}, properties)
|
flexible
|
{
"blob_id": "d56e313318635788ae5b3d3a3f767450ab2f2296",
"index": 4985,
"step-1": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, {'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 
'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n 
self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n 
BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n 
self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', 
side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-2": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, {'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 
'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n 
self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def 
test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 
'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n 
self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-3": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n <mask token>\n\n def test_strip_dev(self):\n self.assertEqual('sda', block_device.strip_dev('/dev/sda'))\n self.assertEqual('sda', block_device.strip_dev('sda'))\n self.assertIsNone(block_device.strip_dev(None))\n <mask token>\n\n def test_get_device_letter(self):\n self.assertEqual('', block_device.get_device_letter(''))\n self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))\n self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))\n self.assertEqual('d', block_device.get_device_letter('/dev/d'))\n self.assertEqual('a', block_device.get_device_letter('a'))\n self.assertEqual('b', block_device.get_device_letter('sdb2'))\n self.assertEqual('c', block_device.get_device_letter('vdc'))\n self.assertEqual('c', block_device.get_device_letter('hdc'))\n self.assertIsNone(block_device.get_device_letter(None))\n\n def test_generate_device_name(self):\n expected = ('vda', ('vd', 0)), ('vdaa', ('vd', 26)), ('vdabc', (\n 'vd', 730)), ('vdidpok', ('vd', 4194304)), ('sdc', ('sd', 2)), (\n 'sdaa', ('sd', 26)), ('sdiw', ('sd', 256)), ('hdzz', ('hd', 701))\n for res, args in expected:\n self.assertEqual(res, block_device.generate_device_name(*args))\n <mask token>\n\n def test_get_root_bdm(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm, {'device_name': 'vdb', 'boot_index': 1}, {\n 'device_name': 'vdc', 'boot_index': -1}, {'device_name': 'vdd'}]\n self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))\n self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))\n self.assertIsNone(block_device.get_root_bdm(bdms[1:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[2:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[3:]))\n self.assertIsNone(block_device.get_root_bdm([]))\n <mask token>\n <mask token>\n\n def test_get_bdm_local_disk_num(self):\n size = block_device.get_bdm_local_disk_num(self.new_mapping)\n self.assertEqual(2, size)\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, 
{'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', 
dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def 
_get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n 
BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def 
test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 
'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-4": "<mask token>\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n <mask token>\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root', 'device': root_device0}]\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings, 'root_device_name': root_device1}\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0, block_device.\n properties_root_device_name(properties0))\n self.assertEqual(root_device1, block_device.\n properties_root_device_name(properties1))\n <mask token>\n\n def test_mappings_prepend_dev(self):\n mapping = [{'virtual': 'ami', 'device': '/dev/sda'}, {'virtual':\n 'root', 'device': 'sda'}, {'virtual': 'ephemeral0', 'device':\n 'sdb'}, {'virtual': 'swap', 'device': 'sdc'}, {'virtual':\n 'ephemeral1', 'device': 'sdd'}, {'virtual': 'ephemeral2',\n 'device': 'sde'}]\n expected = [{'virtual': 'ami', 'device': '/dev/sda'}, {'virtual':\n 'root', 'device': 'sda'}, {'virtual': 'ephemeral0', 'device':\n '/dev/sdb'}, {'virtual': 'swap', 'device': '/dev/sdc'}, {\n 'virtual': 'ephemeral1', 'device': '/dev/sdd'}, {'virtual':\n 'ephemeral2', 'device': '/dev/sde'}]\n prepended = block_device.mappings_prepend_dev(mapping)\n self.assertEqual(sorted(expected, key=lambda v: v['virtual']),\n sorted(prepended, key=lambda v: v['virtual']))\n\n def test_strip_dev(self):\n self.assertEqual('sda', block_device.strip_dev('/dev/sda'))\n self.assertEqual('sda', block_device.strip_dev('sda'))\n self.assertIsNone(block_device.strip_dev(None))\n\n def test_strip_prefix(self):\n self.assertEqual('a', block_device.strip_prefix('/dev/sda'))\n self.assertEqual('a', block_device.strip_prefix('a'))\n self.assertEqual('a', block_device.strip_prefix('xvda'))\n self.assertEqual('a', block_device.strip_prefix('vda'))\n self.assertEqual('a', block_device.strip_prefix('hda'))\n self.assertIsNone(block_device.strip_prefix(None))\n\n def test_get_device_letter(self):\n self.assertEqual('', block_device.get_device_letter(''))\n self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))\n self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))\n self.assertEqual('d', block_device.get_device_letter('/dev/d'))\n self.assertEqual('a', block_device.get_device_letter('a'))\n self.assertEqual('b', block_device.get_device_letter('sdb2'))\n self.assertEqual('c', block_device.get_device_letter('vdc'))\n self.assertEqual('c', block_device.get_device_letter('hdc'))\n self.assertIsNone(block_device.get_device_letter(None))\n\n def test_generate_device_name(self):\n expected = ('vda', ('vd', 0)), ('vdaa', ('vd', 26)), ('vdabc', (\n 'vd', 730)), ('vdidpok', ('vd', 4194304)), ('sdc', ('sd', 2)), (\n 'sdaa', ('sd', 26)), ('sdiw', ('sd', 256)), ('hdzz', ('hd', 701))\n for res, args in expected:\n self.assertEqual(res, block_device.generate_device_name(*args))\n <mask token>\n\n def test_get_root_bdm(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm, {'device_name': 'vdb', 'boot_index': 1}, {\n 'device_name': 'vdc', 'boot_index': -1}, {'device_name': 'vdd'}]\n self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))\n self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))\n self.assertIsNone(block_device.get_root_bdm(bdms[1:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[2:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[3:]))\n self.assertIsNone(block_device.get_root_bdm([]))\n\n def test_get_bdm_ephemeral_disk_size(self):\n size = 
block_device.get_bdm_ephemeral_disk_size(self.new_mapping)\n self.assertEqual(10, size)\n <mask token>\n\n def test_get_bdm_local_disk_num(self):\n size = block_device.get_bdm_local_disk_num(self.new_mapping)\n self.assertEqual(2, size)\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n <mask token>\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n validate_device_name, value)\n <mask token>\n <mask token>\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n BDM = block_device.BlockDeviceDict\n self.api_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}, {'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}, {'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume', 'uuid':\n 'fake-volume-id-1', 'boot_index': 0}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'source_type':\n 'snapshot', 'destination_type': 'volume', 'uuid':\n 'fake-snapshot-id-1', 'boot_index': -1}, {'id': 5,\n 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping = [BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'source_type': 'blank',\n 'destination_type': 'local', 'delete_on_termination': True,\n 'guest_format': 'swap', 'boot_index': -1}), BDM({'id': 2,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sdc1',\n 'source_type': 'blank', 'destination_type': 'local',\n 'delete_on_termination': True, 'boot_index': -1}), BDM({'id': 3,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda1',\n 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\", 'boot_index': 0}), BDM({'id': 4,\n 'instance_uuid': uuids.instance, 'device_name': '/dev/sda2',\n 'source_type': 'snapshot', 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}), BDM({'id': 5, 'instance_uuid': uuids.\n instance, 'no_device': True, 'device_name': '/dev/vdc'})]\n self.legacy_mapping = [{'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1', 'delete_on_termination': True,\n 'virtual_name': 'swap'}, {'id': 2, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sdc1', 'delete_on_termination': \n True, 'virtual_name': 'ephemeral0'}, {'id': 3, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda1', 'volume_id':\n 'fake-volume-id-1', 'connection_info':\n \"{'fake': 'connection_info'}\"}, {'id': 4, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda2', 'connection_info':\n \"{'fake': 'connection_info'}\", 'snapshot_id':\n 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2'}, {'id': \n 5, 'instance_uuid': uuids.instance, 'no_device': True,\n 'device_name': '/dev/vdc'}]\n self.new_mapping_source_image = [BDM({'id': 6, 
'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'source_type':\n 'image', 'destination_type': 'volume', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}), BDM({'id': 7, 'instance_uuid': uuids.\n instance, 'device_name': '/dev/sda4', 'source_type': 'image',\n 'destination_type': 'local', 'connection_info':\n \"{'fake': 'connection_info'}\", 'image_id': 'fake-image-id-2',\n 'boot_index': -1})]\n self.legacy_mapping_source_image = [{'id': 6, 'instance_uuid':\n uuids.instance, 'device_name': '/dev/sda3', 'connection_info':\n \"{'fake': 'connection_info'}\", 'volume_id': 'fake-volume-id-3'}]\n\n def test_init(self):\n\n def fake_validate(obj, dct):\n pass\n self.stub_out('nova.block_device.BlockDeviceDict._fields', set([\n 'field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo', 'field2':\n 'bar', 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'}, field2='bar'\n )\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance, 'device_name':\n 'vda', 'source_type': 'volume', 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, {'bogus_field': 'lame_val'})\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_bdm)\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = 'not a valid name'\n self.assertRaises(exception.InvalidBDMFormat, 
block_device.\n BlockDeviceDict, lame_dev_bdm)\n lame_dev_bdm['device_name'] = ''\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_dev_bdm)\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, lame_volume_size_bdm)\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict, verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n self.assertThat(block_device.BlockDeviceDict.from_legacy(legacy\n ), matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms if bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0]\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n new_with_img = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n new_with_img_and_root = block_device.from_legacy_mapping(self.\n legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n new_no_root = block_device.from_legacy_mapping(self.legacy_mapping,\n 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(block_device.BlockDeviceDict.from_api(api, \n False), matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1, 'source_type': 'blank', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'delete_on_termination': \n True, 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n\n 
def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': 0}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'volume_id': 'fake-volume-id-1', 'uuid': 1,\n 'boot_index': '0'}\n retexp = block_device.BlockDeviceDict({'id': 1, 'source_type':\n 'image', 'image_id': 1, 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1', 'boot_index': 0})\n self.assertEqual(retexp, block_device.BlockDeviceDict.from_api(\n api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'image', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type':\n 'fake-lvm-1', 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'local', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying a volume_type with destination_type=local is not supported'\n , str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1, 'source_type': 'volume', 'destination_type':\n 'volume', 'uuid': 'fake-volume-id-1', 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat, block_device.\n BlockDeviceDict.from_api, api_dict, False)\n self.assertIn(\n 'Specifying volume type to existing volume is not supported',\n str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(bdm\n ).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination, snapshot[\n 'delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(None, obj,\n fake_block_device.FakeDbBlockDeviceDict(bdm))\n 
self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n expected_meta = {'min_disk': 0, 'min_ram': 0, 'properties': {},\n 'size': 0, 'status': 'active'}\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n side_effect=get_vol_data):\n if not is_bootable:\n self.assertRaises(exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata, self.context, self\n .compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = block_device.get_bdm_image_metadata(self.context,\n self.compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': None, 'volume_id':\n '1', 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n with mock.patch.object(self.compute_api.volume_api, 'get',\n return_value=fake_volume):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{'id': 1, 'device_name': 'vda', 'no_device':\n None, 'virtual_name': None, 'snapshot_id': '2', 'volume_id':\n None, 'delete_on_termination': False}]\n fake_volume = {'volume_image_metadata': {'min_ram': 256, 'min_disk':\n 128, 'foo': 'bar'}}\n fake_snapshot = {'volume_id': '1'}\n with test.nested(mock.patch.object(self.compute_api.volume_api,\n 'get', return_value=fake_volume), mock.patch.object(self.\n compute_api.volume_api, 'get_snapshot', return_value=fake_snapshot)\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(self.context, self.\n compute_api.image_api, self.compute_api.volume_api,\n block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(self.context,\n block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(self.context, fake_snapshot[\n 'volume_id'])\n\n @mock.patch.object(cinder.API, 'get', side_effect=exception.\n CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [objects.BlockDeviceMapping(**fake_block_device.\n FakeDbBlockDeviceDict({'id': 1, 'volume_id': 1, 'source_type':\n 'volume', 'destination_type': 
'volume', 'device_name': 'vda'}))]\n self.assertRaises(exception.CinderConnectionFailed, block_device.\n get_bdm_image_metadata, self.context, self.compute_api.\n image_api, self.compute_api.volume_api, bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {'min_ram': '256', 'min_disk': '128', 'image_id':\n 'fake_id', 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n self.assertNotEqual({}, properties)\n",
"step-5": "# Copyright 2011 Isaku Yamahata\n# All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\n\"\"\"\nTests for Block Device utility functions.\n\"\"\"\n\nfrom unittest import mock\n\nfrom oslo_utils.fixture import uuidsentinel as uuids\nfrom oslo_utils import units\n\nfrom nova import block_device\nfrom nova.compute import api as compute_api\nfrom nova import context\nfrom nova import exception\nfrom nova import objects\nfrom nova import test\nfrom nova.tests.unit import fake_block_device\nfrom nova.tests.unit import matchers\nfrom nova.volume import cinder\n\n\nclass BlockDeviceTestCase(test.NoDBTestCase):\n def setUp(self):\n super(BlockDeviceTestCase, self).setUp()\n BDM = block_device.BlockDeviceDict\n\n self.new_mapping = [\n BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'volume_size': 1,\n 'guest_format': 'swap',\n 'boot_index': -1}),\n BDM({'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'volume_size': 10,\n 'delete_on_termination': True,\n 'boot_index': -1}),\n BDM({'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'boot_index': 0}),\n BDM({'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'source_type': 'snapshot',\n 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'snapshot_id': 'fake-snapshot-id-1',\n 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}),\n BDM({'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'}),\n ]\n\n def test_properties(self):\n root_device0 = '/dev/sda'\n root_device1 = '/dev/sdb'\n mappings = [{'virtual': 'root',\n 'device': root_device0}]\n\n properties0 = {'mappings': mappings}\n properties1 = {'mappings': mappings,\n 'root_device_name': root_device1}\n\n self.assertIsNone(block_device.properties_root_device_name({}))\n self.assertEqual(root_device0,\n block_device.properties_root_device_name(properties0))\n self.assertEqual(root_device1,\n block_device.properties_root_device_name(properties1))\n\n def test_ephemeral(self):\n self.assertFalse(block_device.is_ephemeral('ephemeral'))\n self.assertTrue(block_device.is_ephemeral('ephemeral0'))\n self.assertTrue(block_device.is_ephemeral('ephemeral1'))\n self.assertTrue(block_device.is_ephemeral('ephemeral11'))\n self.assertFalse(block_device.is_ephemeral('root'))\n self.assertFalse(block_device.is_ephemeral('swap'))\n self.assertFalse(block_device.is_ephemeral('/dev/sda1'))\n\n self.assertEqual(0, block_device.ephemeral_num('ephemeral0'))\n self.assertEqual(1, block_device.ephemeral_num('ephemeral1'))\n self.assertEqual(11, block_device.ephemeral_num('ephemeral11'))\n\n 
self.assertFalse(block_device.is_swap_or_ephemeral('ephemeral'))\n self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral0'))\n self.assertTrue(block_device.is_swap_or_ephemeral('ephemeral1'))\n self.assertTrue(block_device.is_swap_or_ephemeral('swap'))\n self.assertFalse(block_device.is_swap_or_ephemeral('root'))\n self.assertFalse(block_device.is_swap_or_ephemeral('/dev/sda1'))\n\n def test_mappings_prepend_dev(self):\n mapping = [\n {'virtual': 'ami', 'device': '/dev/sda'},\n {'virtual': 'root', 'device': 'sda'},\n {'virtual': 'ephemeral0', 'device': 'sdb'},\n {'virtual': 'swap', 'device': 'sdc'},\n {'virtual': 'ephemeral1', 'device': 'sdd'},\n {'virtual': 'ephemeral2', 'device': 'sde'}]\n\n expected = [\n {'virtual': 'ami', 'device': '/dev/sda'},\n {'virtual': 'root', 'device': 'sda'},\n {'virtual': 'ephemeral0', 'device': '/dev/sdb'},\n {'virtual': 'swap', 'device': '/dev/sdc'},\n {'virtual': 'ephemeral1', 'device': '/dev/sdd'},\n {'virtual': 'ephemeral2', 'device': '/dev/sde'}]\n\n prepended = block_device.mappings_prepend_dev(mapping)\n self.assertEqual(sorted(expected, key=lambda v: v['virtual']),\n sorted(prepended, key=lambda v: v['virtual']))\n\n def test_strip_dev(self):\n self.assertEqual('sda', block_device.strip_dev('/dev/sda'))\n self.assertEqual('sda', block_device.strip_dev('sda'))\n self.assertIsNone(block_device.strip_dev(None))\n\n def test_strip_prefix(self):\n self.assertEqual('a', block_device.strip_prefix('/dev/sda'))\n self.assertEqual('a', block_device.strip_prefix('a'))\n self.assertEqual('a', block_device.strip_prefix('xvda'))\n self.assertEqual('a', block_device.strip_prefix('vda'))\n self.assertEqual('a', block_device.strip_prefix('hda'))\n self.assertIsNone(block_device.strip_prefix(None))\n\n def test_get_device_letter(self):\n self.assertEqual('', block_device.get_device_letter(''))\n self.assertEqual('a', block_device.get_device_letter('/dev/sda1'))\n self.assertEqual('b', block_device.get_device_letter('/dev/xvdb'))\n self.assertEqual('d', block_device.get_device_letter('/dev/d'))\n self.assertEqual('a', block_device.get_device_letter('a'))\n self.assertEqual('b', block_device.get_device_letter('sdb2'))\n self.assertEqual('c', block_device.get_device_letter('vdc'))\n self.assertEqual('c', block_device.get_device_letter('hdc'))\n self.assertIsNone(block_device.get_device_letter(None))\n\n def test_generate_device_name(self):\n expected = (\n ('vda', (\"vd\", 0)),\n ('vdaa', (\"vd\", 26)),\n ('vdabc', (\"vd\", 730)),\n ('vdidpok', (\"vd\", 4194304)),\n ('sdc', (\"sd\", 2)),\n ('sdaa', (\"sd\", 26)),\n ('sdiw', (\"sd\", 256)),\n ('hdzz', (\"hd\", 701))\n )\n for res, args in expected:\n self.assertEqual(res, block_device.generate_device_name(*args))\n\n def test_volume_in_mapping(self):\n swap = {'device_name': '/dev/sdb',\n 'swap_size': 1}\n ephemerals = [{'num': 0,\n 'virtual_name': 'ephemeral0',\n 'device_name': '/dev/sdc1',\n 'size': 1},\n {'num': 2,\n 'virtual_name': 'ephemeral2',\n 'device_name': '/dev/sdd',\n 'size': 1}]\n block_device_mapping = [{'mount_device': '/dev/sde',\n 'device_path': 'fake_device'},\n {'mount_device': '/dev/sdf',\n 'device_path': 'fake_device'}]\n block_device_info = {\n 'root_device_name': '/dev/sda',\n 'swap': swap,\n 'ephemerals': ephemerals,\n 'block_device_mapping': block_device_mapping}\n\n def _assert_volume_in_mapping(device_name, true_or_false):\n in_mapping = block_device.volume_in_mapping(\n device_name, block_device_info)\n self.assertEqual(true_or_false, in_mapping)\n\n _assert_volume_in_mapping('sda', 
False)\n _assert_volume_in_mapping('sdb', True)\n _assert_volume_in_mapping('sdc1', True)\n _assert_volume_in_mapping('sdd', True)\n _assert_volume_in_mapping('sde', True)\n _assert_volume_in_mapping('sdf', True)\n _assert_volume_in_mapping('sdg', False)\n _assert_volume_in_mapping('sdh1', False)\n\n def test_get_root_bdm(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm,\n {'device_name': 'vdb', 'boot_index': 1},\n {'device_name': 'vdc', 'boot_index': -1},\n {'device_name': 'vdd'}]\n self.assertEqual(root_bdm, block_device.get_root_bdm(bdms))\n self.assertEqual(root_bdm, block_device.get_root_bdm([bdms[0]]))\n self.assertIsNone(block_device.get_root_bdm(bdms[1:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[2:]))\n self.assertIsNone(block_device.get_root_bdm(bdms[3:]))\n self.assertIsNone(block_device.get_root_bdm([]))\n\n def test_get_bdm_ephemeral_disk_size(self):\n size = block_device.get_bdm_ephemeral_disk_size(self.new_mapping)\n self.assertEqual(10, size)\n\n def test_get_bdm_swap_list(self):\n swap_list = block_device.get_bdm_swap_list(self.new_mapping)\n self.assertEqual(1, len(swap_list))\n self.assertEqual(1, swap_list[0].get('id'))\n\n def test_get_bdm_local_disk_num(self):\n size = block_device.get_bdm_local_disk_num(self.new_mapping)\n self.assertEqual(2, size)\n\n def test_new_format_is_swap(self):\n expected_results = [True, False, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_swap(bdm)\n self.assertEqual(expected, res)\n\n def test_new_format_is_ephemeral(self):\n expected_results = [False, True, False, False, False]\n for expected, bdm in zip(expected_results, self.new_mapping):\n res = block_device.new_format_is_ephemeral(bdm)\n self.assertEqual(expected, res)\n\n def test_validate_device_name(self):\n for value in [' ', 10, None, 'a' * 260]:\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.validate_device_name,\n value)\n\n def test_validate_and_default_volume_size(self):\n bdm = {}\n for value in [-1, 'a', 2.5]:\n bdm['volume_size'] = value\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.validate_and_default_volume_size,\n bdm)\n\n def test_get_bdms_to_connect(self):\n root_bdm = {'device_name': 'vda', 'boot_index': 0}\n bdms = [root_bdm,\n {'device_name': 'vdb', 'boot_index': 1},\n {'device_name': 'vdc', 'boot_index': -1},\n {'device_name': 'vde', 'boot_index': None},\n {'device_name': 'vdd'}]\n self.assertNotIn(root_bdm, block_device.get_bdms_to_connect(bdms,\n exclude_root_mapping=True))\n self.assertIn(root_bdm, block_device.get_bdms_to_connect(bdms))\n\n\nclass TestBlockDeviceDict(test.NoDBTestCase):\n def setUp(self):\n super(TestBlockDeviceDict, self).setUp()\n\n BDM = block_device.BlockDeviceDict\n\n self.api_mapping = [\n {'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'guest_format': 'swap',\n 'boot_index': -1},\n {'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'boot_index': -1},\n {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'uuid': 'fake-volume-id-1',\n 'boot_index': 0},\n {'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'source_type': 'snapshot',\n 'destination_type': 
'volume',\n 'uuid': 'fake-snapshot-id-1',\n 'boot_index': -1},\n {'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'},\n ]\n\n self.new_mapping = [\n BDM({'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'guest_format': 'swap',\n 'boot_index': -1}),\n BDM({'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'source_type': 'blank',\n 'destination_type': 'local',\n 'delete_on_termination': True,\n 'boot_index': -1}),\n BDM({'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'boot_index': 0}),\n BDM({'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'source_type': 'snapshot',\n 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'snapshot_id': 'fake-snapshot-id-1',\n 'volume_id': 'fake-volume-id-2',\n 'boot_index': -1}),\n BDM({'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'}),\n ]\n\n self.legacy_mapping = [\n {'id': 1, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdb1',\n 'delete_on_termination': True,\n 'virtual_name': 'swap'},\n {'id': 2, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sdc1',\n 'delete_on_termination': True,\n 'virtual_name': 'ephemeral0'},\n {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda1',\n 'volume_id': 'fake-volume-id-1',\n 'connection_info': \"{'fake': 'connection_info'}\"},\n {'id': 4, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda2',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'snapshot_id': 'fake-snapshot-id-1',\n 'volume_id': 'fake-volume-id-2'},\n {'id': 5, 'instance_uuid': uuids.instance,\n 'no_device': True,\n 'device_name': '/dev/vdc'},\n ]\n\n self.new_mapping_source_image = [\n BDM({'id': 6, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda3',\n 'source_type': 'image',\n 'destination_type': 'volume',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'volume_id': 'fake-volume-id-3',\n 'boot_index': -1}),\n BDM({'id': 7, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda4',\n 'source_type': 'image',\n 'destination_type': 'local',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'image_id': 'fake-image-id-2',\n 'boot_index': -1}),\n ]\n\n self.legacy_mapping_source_image = [\n {'id': 6, 'instance_uuid': uuids.instance,\n 'device_name': '/dev/sda3',\n 'connection_info': \"{'fake': 'connection_info'}\",\n 'volume_id': 'fake-volume-id-3'},\n ]\n\n def test_init(self):\n def fake_validate(obj, dct):\n pass\n\n self.stub_out('nova.block_device.BlockDeviceDict._fields',\n set(['field1', 'field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._db_only_fields',\n set(['db_field1', 'db_field2']))\n self.stub_out('nova.block_device.BlockDeviceDict._validate',\n fake_validate)\n\n # Make sure db fields are not picked up if they are not\n # in the original dict\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo',\n 'field2': 'bar',\n 'db_field1': 'baz'})\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n\n # Make sure all expected fields are defaulted\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'})\n 
self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n\n # Unless they are not meant to be\n dev_dict = block_device.BlockDeviceDict({'field1': 'foo'},\n do_not_default=set(['field2']))\n self.assertIn('field1', dev_dict)\n self.assertNotIn('field2', dev_dict)\n self.assertNotIn('db_field1', dev_dict)\n self.assertNotIn('db_field2', dev_dict)\n\n # Passing kwargs to constructor works\n dev_dict = block_device.BlockDeviceDict(field1='foo')\n self.assertIn('field1', dev_dict)\n self.assertIn('field2', dev_dict)\n self.assertIsNone(dev_dict['field2'])\n dev_dict = block_device.BlockDeviceDict(\n {'field1': 'foo'}, field2='bar')\n self.assertEqual('foo', dev_dict['field1'])\n self.assertEqual('bar', dev_dict['field2'])\n\n def test_init_prepend_dev_to_device_name(self):\n bdm = {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': 'vda',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vda', bdm_dict['device_name'])\n\n bdm['device_name'] = '/dev/vdb'\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertEqual('/dev/vdb', bdm_dict['device_name'])\n\n bdm['device_name'] = None\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertIsNone(bdm_dict['device_name'])\n\n def test_init_boolify_delete_on_termination(self):\n # Make sure that when delete_on_termination is not passed it's\n # still set to False and not None\n bdm = {'id': 3, 'instance_uuid': uuids.instance,\n 'device_name': 'vda',\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0}\n bdm_dict = block_device.BlockDeviceDict(bdm)\n self.assertFalse(bdm_dict['delete_on_termination'])\n\n def test_validate(self):\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n {'bogus_field': 'lame_val'})\n\n lame_bdm = dict(self.new_mapping[2])\n del lame_bdm['source_type']\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_bdm)\n\n lame_bdm['no_device'] = True\n block_device.BlockDeviceDict(lame_bdm)\n\n lame_dev_bdm = dict(self.new_mapping[2])\n lame_dev_bdm['device_name'] = \"not a valid name\"\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_dev_bdm)\n\n lame_dev_bdm['device_name'] = \"\"\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_dev_bdm)\n\n cool_volume_size_bdm = dict(self.new_mapping[2])\n cool_volume_size_bdm['volume_size'] = '42'\n cool_volume_size_bdm = block_device.BlockDeviceDict(\n cool_volume_size_bdm)\n self.assertEqual(42, cool_volume_size_bdm['volume_size'])\n\n lame_volume_size_bdm = dict(self.new_mapping[2])\n lame_volume_size_bdm['volume_size'] = 'some_non_int_string'\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n lame_volume_size_bdm)\n\n truthy_bdm = dict(self.new_mapping[2])\n truthy_bdm['delete_on_termination'] = '1'\n truthy_bdm = block_device.BlockDeviceDict(truthy_bdm)\n self.assertTrue(truthy_bdm['delete_on_termination'])\n\n verbose_bdm = dict(self.new_mapping[2])\n verbose_bdm['boot_index'] = 'first'\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict,\n verbose_bdm)\n\n def test_from_legacy(self):\n for legacy, new in zip(self.legacy_mapping, self.new_mapping):\n 
self.assertThat(\n block_device.BlockDeviceDict.from_legacy(legacy),\n matchers.IsSubDictOf(new))\n\n def test_from_legacy_mapping(self):\n def _get_image_bdms(bdms):\n return [bdm for bdm in bdms if bdm['source_type'] == 'image']\n\n def _get_bootable_bdms(bdms):\n return [bdm for bdm in bdms\n if (bdm['boot_index'] is not None and\n bdm['boot_index'] >= 0)]\n\n new_no_img = block_device.from_legacy_mapping(self.legacy_mapping)\n self.assertEqual(0, len(_get_image_bdms(new_no_img)))\n\n for new, expected in zip(new_no_img, self.new_mapping):\n self.assertThat(new, matchers.IsSubDictOf(expected))\n\n new_with_img = block_device.from_legacy_mapping(\n self.legacy_mapping, 'fake_image_ref')\n image_bdms = _get_image_bdms(new_with_img)\n boot_bdms = _get_bootable_bdms(new_with_img)\n self.assertEqual(1, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, image_bdms[0]['boot_index'])\n self.assertEqual('image', boot_bdms[0]['source_type'])\n\n new_with_img_and_root = block_device.from_legacy_mapping(\n self.legacy_mapping, 'fake_image_ref', 'sda1')\n image_bdms = _get_image_bdms(new_with_img_and_root)\n boot_bdms = _get_bootable_bdms(new_with_img_and_root)\n self.assertEqual(0, len(image_bdms))\n self.assertEqual(1, len(boot_bdms))\n self.assertEqual(0, boot_bdms[0]['boot_index'])\n self.assertEqual('volume', boot_bdms[0]['source_type'])\n\n new_no_root = block_device.from_legacy_mapping(\n self.legacy_mapping, 'fake_image_ref', 'sda1', no_root=True)\n self.assertEqual(0, len(_get_image_bdms(new_no_root)))\n self.assertEqual(0, len(_get_bootable_bdms(new_no_root)))\n\n def test_from_api(self):\n for api, new in zip(self.api_mapping, self.new_mapping):\n new['connection_info'] = None\n if new['snapshot_id']:\n new['volume_id'] = None\n self.assertThat(\n block_device.BlockDeviceDict.from_api(api, False),\n matchers.IsSubDictOf(new))\n\n def test_from_api_invalid_blank_id(self):\n api_dict = {'id': 1,\n 'source_type': 'blank',\n 'destination_type': 'volume',\n 'uuid': 'fake-volume-id-1',\n 'delete_on_termination': True,\n 'boot_index': -1}\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api, api_dict,\n False)\n\n def test_from_api_invalid_source_to_local_mapping(self):\n api_dict = {'id': 1,\n 'source_type': 'image',\n 'destination_type': 'local',\n 'uuid': 'fake-volume-id-1'}\n self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api, api_dict,\n False)\n\n def test_from_api_valid_source_to_local_mapping(self):\n api_dict = {'id': 1,\n 'source_type': 'image',\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'uuid': 1,\n 'boot_index': 0}\n\n retexp = block_device.BlockDeviceDict(\n {'id': 1,\n 'source_type': 'image',\n 'image_id': 1,\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0})\n self.assertEqual(retexp,\n block_device.BlockDeviceDict.from_api(api_dict, True))\n\n def test_from_api_valid_source_to_local_mapping_with_string_bi(self):\n api_dict = {'id': 1,\n 'source_type': 'image',\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'uuid': 1,\n 'boot_index': '0'}\n\n retexp = block_device.BlockDeviceDict(\n {'id': 1,\n 'source_type': 'image',\n 'image_id': 1,\n 'destination_type': 'local',\n 'volume_id': 'fake-volume-id-1',\n 'boot_index': 0})\n self.assertEqual(retexp,\n block_device.BlockDeviceDict.from_api(api_dict, True))\n\n def test_from_api_invalid_image_to_destination_local_mapping(self):\n api_dict = {'id': 1,\n 
'source_type': 'image',\n 'destination_type': 'local',\n 'uuid': 'fake-volume-id-1',\n 'volume_type': 'fake-lvm-1',\n 'boot_index': 1}\n ex = self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api,\n api_dict, False)\n self.assertIn('Mapping image to local is not supported', str(ex))\n\n def test_from_api_invalid_volume_type_to_destination_local_mapping(self):\n api_dict = {'id': 1,\n 'source_type': 'volume',\n 'destination_type': 'local',\n 'uuid': 'fake-volume-id-1',\n 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api,\n api_dict, False)\n self.assertIn('Specifying a volume_type with destination_type=local '\n 'is not supported', str(ex))\n\n def test_from_api_invalid_specify_volume_type_with_source_volume_mapping(\n self):\n api_dict = {'id': 1,\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'uuid': 'fake-volume-id-1',\n 'volume_type': 'fake-lvm-1'}\n ex = self.assertRaises(exception.InvalidBDMFormat,\n block_device.BlockDeviceDict.from_api,\n api_dict, False)\n self.assertIn('Specifying volume type to existing volume is '\n 'not supported', str(ex))\n\n def test_image_mapping(self):\n removed_fields = ['id', 'instance_uuid', 'connection_info',\n 'created_at', 'updated_at', 'deleted_at', 'deleted']\n for bdm in self.new_mapping:\n mapping_bdm = fake_block_device.FakeDbBlockDeviceDict(\n bdm).get_image_mapping()\n for fld in removed_fields:\n self.assertNotIn(fld, mapping_bdm)\n\n def _test_snapshot_from_bdm(self, template):\n snapshot = block_device.snapshot_from_bdm('new-snapshot-id', template)\n self.assertEqual('new-snapshot-id', snapshot['snapshot_id'])\n self.assertEqual('snapshot', snapshot['source_type'])\n self.assertEqual('volume', snapshot['destination_type'])\n self.assertEqual(template.volume_size, snapshot['volume_size'])\n self.assertEqual(template.delete_on_termination,\n snapshot['delete_on_termination'])\n self.assertEqual(template.device_name, snapshot['device_name'])\n for key in ['disk_bus', 'device_type', 'boot_index']:\n self.assertEqual(template[key], snapshot[key])\n\n def test_snapshot_from_bdm(self):\n for bdm in self.new_mapping:\n self._test_snapshot_from_bdm(objects.BlockDeviceMapping(**bdm))\n\n def test_snapshot_from_object(self):\n for bdm in self.new_mapping[:-1]:\n obj = objects.BlockDeviceMapping()\n obj = objects.BlockDeviceMapping._from_db_object(\n None, obj, fake_block_device.FakeDbBlockDeviceDict(\n bdm))\n self._test_snapshot_from_bdm(obj)\n\n\nclass GetBDMImageMetadataTestCase(test.NoDBTestCase):\n\n def setUp(self):\n super().setUp()\n self.compute_api = compute_api.API()\n self.context = context.RequestContext('fake', 'fake')\n\n def _test_get_bdm_image_metadata__bootable(self, is_bootable=False):\n block_device_mapping = [{\n 'id': 1,\n 'device_name': 'vda',\n 'no_device': None,\n 'virtual_name': None,\n 'snapshot_id': None,\n 'volume_id': '1',\n 'delete_on_termination': False,\n }]\n\n expected_meta = {\n 'min_disk': 0, 'min_ram': 0, 'properties': {}, 'size': 0,\n 'status': 'active',\n }\n\n def get_vol_data(*args, **kwargs):\n return {'bootable': is_bootable}\n\n with mock.patch.object(\n self.compute_api.volume_api, 'get', side_effect=get_vol_data,\n ):\n if not is_bootable:\n self.assertRaises(\n exception.InvalidBDMVolumeNotBootable,\n block_device.get_bdm_image_metadata,\n self.context,\n self.compute_api.image_api,\n self.compute_api.volume_api,\n block_device_mapping)\n else:\n meta = 
block_device.get_bdm_image_metadata(\n self.context, self.compute_api.image_api,\n self.compute_api.volume_api, block_device_mapping)\n self.assertEqual(expected_meta, meta)\n\n def test_get_bdm_image_metadata__non_bootable(self):\n self._test_get_bdm_image_metadata__bootable(False)\n\n def test_get_bdm_image_metadata__bootable(self):\n self._test_get_bdm_image_metadata__bootable(True)\n\n def test_get_bdm_image_metadata__basic_property(self):\n block_device_mapping = [{\n 'id': 1,\n 'device_name': 'vda',\n 'no_device': None,\n 'virtual_name': None,\n 'snapshot_id': None,\n 'volume_id': '1',\n 'delete_on_termination': False,\n }]\n fake_volume = {\n 'volume_image_metadata': {\n 'min_ram': 256, 'min_disk': 128, 'foo': 'bar',\n },\n }\n with mock.patch.object(\n self.compute_api.volume_api, 'get', return_value=fake_volume,\n ):\n meta = block_device.get_bdm_image_metadata(\n self.context, self.compute_api.image_api,\n self.compute_api.volume_api, block_device_mapping)\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n\n def test_get_bdm_image_metadata__snapshot_basic_property(self):\n block_device_mapping = [{\n 'id': 1,\n 'device_name': 'vda',\n 'no_device': None,\n 'virtual_name': None,\n 'snapshot_id': '2',\n 'volume_id': None,\n 'delete_on_termination': False,\n }]\n fake_volume = {\n 'volume_image_metadata': {\n 'min_ram': 256, 'min_disk': 128, 'foo': 'bar',\n },\n }\n fake_snapshot = {'volume_id': '1'}\n with test.nested(\n mock.patch.object(\n self.compute_api.volume_api, 'get',\n return_value=fake_volume),\n mock.patch.object(\n self.compute_api.volume_api, 'get_snapshot',\n return_value=fake_snapshot),\n ) as (volume_get, volume_get_snapshot):\n meta = block_device.get_bdm_image_metadata(\n self.context, self.compute_api.image_api,\n self.compute_api.volume_api, block_device_mapping)\n\n self.assertEqual(256, meta['min_ram'])\n self.assertEqual(128, meta['min_disk'])\n self.assertEqual('active', meta['status'])\n self.assertEqual('bar', meta['properties']['foo'])\n volume_get_snapshot.assert_called_once_with(\n self.context, block_device_mapping[0]['snapshot_id'])\n volume_get.assert_called_once_with(\n self.context, fake_snapshot['volume_id'])\n\n @mock.patch.object(\n cinder.API, 'get',\n side_effect=exception.CinderConnectionFailed(reason='error'))\n def test_get_bdm_image_metadata__cinder_down(self, mock_get):\n bdms = [\n objects.BlockDeviceMapping(\n **fake_block_device.FakeDbBlockDeviceDict({\n 'id': 1,\n 'volume_id': 1,\n 'source_type': 'volume',\n 'destination_type': 'volume',\n 'device_name': 'vda',\n })\n )\n ]\n self.assertRaises(\n exception.CinderConnectionFailed,\n block_device.get_bdm_image_metadata,\n self.context,\n self.compute_api.image_api,\n self.compute_api.volume_api,\n bdms, legacy_bdm=True)\n\n\nclass GetImageMetadataFromVolumeTestCase(test.NoDBTestCase):\n def test_inherit_image_properties(self):\n properties = {'fake_prop': 'fake_value'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(properties, image_meta['properties'])\n\n def test_image_size(self):\n volume = {'size': 10}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(10 * units.Gi, image_meta['size'])\n\n def test_image_status(self):\n volume = {}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual('active', 
image_meta['status'])\n\n def test_values_conversion(self):\n properties = {'min_ram': '5', 'min_disk': '7'}\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual(5, image_meta['min_ram'])\n self.assertEqual(7, image_meta['min_disk'])\n\n def test_suppress_not_image_properties(self):\n properties = {\n 'min_ram': '256', 'min_disk': '128', 'image_id': 'fake_id',\n 'image_name': 'fake_name', 'container_format': 'ami',\n 'disk_format': 'ami', 'size': '1234', 'checksum': 'fake_checksum',\n }\n volume = {'volume_image_metadata': properties}\n image_meta = block_device.get_image_metadata_from_volume(volume)\n self.assertEqual({}, image_meta['properties'])\n self.assertEqual(0, image_meta['size'])\n # volume's properties should not be touched\n self.assertNotEqual({}, properties)\n",
"step-ids": [
37,
38,
43,
46,
55
]
}
|
[
37,
38,
43,
46,
55
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
from .tokening import sign_profile_tokens, validate_token_record, get_profile_from_tokens
from .zone_file import create_zone_file
from .legacy import is_profile_legacy_format, get_person_from_legacy_format
<|reserved_special_token_1|>
from .tokening import sign_profile_tokens, validate_token_record, \
get_profile_from_tokens
from .zone_file import create_zone_file
from .legacy import is_profile_legacy_format, get_person_from_legacy_format
|
flexible
|
{
"blob_id": "de24b341102f5979cc48b22c3a07d42915b6dd18",
"index": 7146,
"step-1": "<mask token>\n",
"step-2": "from .tokening import sign_profile_tokens, validate_token_record, get_profile_from_tokens\nfrom .zone_file import create_zone_file\nfrom .legacy import is_profile_legacy_format, get_person_from_legacy_format\n",
"step-3": "from .tokening import sign_profile_tokens, validate_token_record, \\\n get_profile_from_tokens\nfrom .zone_file import create_zone_file\nfrom .legacy import is_profile_legacy_format, get_person_from_legacy_format \n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_auth_passwordreset_reset1():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
with pytest.raises(ValueError, match='*Incorrect Reset Code*'):
auth_passwordreset_reset('ABS124', 'SomePass')
<|reserved_special_token_0|>
def test_auth_passwordreset_reset3():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
auth_passwordreset_reset('AUW624', 'Valispass12')
assert new_user_password == 'Valispass12'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_auth_passwordreset_reset1():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
with pytest.raises(ValueError, match='*Incorrect Reset Code*'):
auth_passwordreset_reset('ABS124', 'SomePass')
def test_auth_passwordreset_reset2():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
with pytest.raises(ValueError, match='*Invalid Password Length*'):
auth_passwordreset_reset('AUW624', '')
auth_passwordreset_reset('AUW624', 'nope')
def test_auth_passwordreset_reset3():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
auth_passwordreset_reset('AUW624', 'Valispass12')
assert new_user_password == 'Valispass12'
<|reserved_special_token_1|>
from auth_passwordreset_reset import auth_passwordreset_reset
from auth_register import auth_register
from data import *
import pytest
def test_auth_passwordreset_reset1():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
with pytest.raises(ValueError, match='*Incorrect Reset Code*'):
auth_passwordreset_reset('ABS124', 'SomePass')
def test_auth_passwordreset_reset2():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
with pytest.raises(ValueError, match='*Invalid Password Length*'):
auth_passwordreset_reset('AUW624', '')
auth_passwordreset_reset('AUW624', 'nope')
def test_auth_passwordreset_reset3():
register = auth_register('[email protected]', 'Hello123',
'First', 'Last')
auth_passwordreset_request('[email protected]')
auth_passwordreset_reset('AUW624', 'Valispass12')
assert new_user_password == 'Valispass12'
<|reserved_special_token_1|>
from auth_passwordreset_reset import auth_passwordreset_reset
from auth_register import auth_register
from data import *
import pytest
#invalid reset code
def test_auth_passwordreset_reset1():
#create a test account
register = auth_register("[email protected]", "Hello123", "First", "Last")
#call password reset request
auth_passwordreset_request("[email protected]")
#assuming that the code from the email was "WER123"
    #this should not work as the code "ABS124" doesn't match "WER123"
with pytest.raises(ValueError, match='*Incorrect Reset Code*'):
auth_passwordreset_reset("ABS124", "SomePass")
#invalid password
def test_auth_passwordreset_reset2():
#create a test account
register = auth_register("[email protected]", "Hello123", "First", "Last")
#call password reset request
auth_passwordreset_request("[email protected]")
#assume that the code generated was "AUW624"
    #these should not work as the new password lengths are <5
with pytest.raises(ValueError, match='*Invalid Password Length*'):
auth_passwordreset_reset("AUW624", "")
auth_passwordreset_reset("AUW624", "nope")
#valid case
def test_auth_passwordreset_reset3():
#create a test account
register = auth_register("[email protected]", "Hello123", "First", "Last")
#call password reset request
auth_passwordreset_request("[email protected]")
#assume that the code generated was "AUW624"
auth_passwordreset_reset("AUW624", "Valispass12")
#test to see if password updated
assert new_user_password == "Valispass12"
#this sequence should successfully reset the password
|
flexible
|
{
"blob_id": "a315d01f0fb16f0c74c447c07b76f33e6ff6427d",
"index": 9742,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_auth_passwordreset_reset1():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset('ABS124', 'SomePass')\n\n\n<mask token>\n\n\ndef test_auth_passwordreset_reset3():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n auth_passwordreset_reset('AUW624', 'Valispass12')\n assert new_user_password == 'Valispass12'\n",
"step-3": "<mask token>\n\n\ndef test_auth_passwordreset_reset1():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset('ABS124', 'SomePass')\n\n\ndef test_auth_passwordreset_reset2():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Invalid Password Length*'):\n auth_passwordreset_reset('AUW624', '')\n auth_passwordreset_reset('AUW624', 'nope')\n\n\ndef test_auth_passwordreset_reset3():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n auth_passwordreset_reset('AUW624', 'Valispass12')\n assert new_user_password == 'Valispass12'\n",
"step-4": "from auth_passwordreset_reset import auth_passwordreset_reset\nfrom auth_register import auth_register\nfrom data import *\nimport pytest\n\n\ndef test_auth_passwordreset_reset1():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset('ABS124', 'SomePass')\n\n\ndef test_auth_passwordreset_reset2():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n with pytest.raises(ValueError, match='*Invalid Password Length*'):\n auth_passwordreset_reset('AUW624', '')\n auth_passwordreset_reset('AUW624', 'nope')\n\n\ndef test_auth_passwordreset_reset3():\n register = auth_register('[email protected]', 'Hello123',\n 'First', 'Last')\n auth_passwordreset_request('[email protected]')\n auth_passwordreset_reset('AUW624', 'Valispass12')\n assert new_user_password == 'Valispass12'\n",
"step-5": "from auth_passwordreset_reset import auth_passwordreset_reset\nfrom auth_register import auth_register\nfrom data import *\nimport pytest\n\n\n#invalid reset code\ndef test_auth_passwordreset_reset1():\n \n #create a test account\n register = auth_register(\"[email protected]\", \"Hello123\", \"First\", \"Last\")\n \n #call password reset request\n auth_passwordreset_request(\"[email protected]\")\n \n #assuming that the code from the email was \"WER123\"\n \n #this should not work as the code \"ABS124\" doesnt match \"WER123\"\n with pytest.raises(ValueError, match='*Incorrect Reset Code*'):\n auth_passwordreset_reset(\"ABS124\", \"SomePass\")\n \n#invalid password\ndef test_auth_passwordreset_reset2():\n\n #create a test account\n register = auth_register(\"[email protected]\", \"Hello123\", \"First\", \"Last\")\n \n #call password reset request\n auth_passwordreset_request(\"[email protected]\")\n \n #assume that the code generated was \"AUW624\"\n \n #these should not work as the new passowrd lengths are <5\n with pytest.raises(ValueError, match='*Invalid Password Length*'):\n auth_passwordreset_reset(\"AUW624\", \"\")\n auth_passwordreset_reset(\"AUW624\", \"nope\")\n \n#valid case\ndef test_auth_passwordreset_reset3():\n \n #create a test account\n register = auth_register(\"[email protected]\", \"Hello123\", \"First\", \"Last\")\n \n #call password reset request\n auth_passwordreset_request(\"[email protected]\")\n \n #assume that the code generated was \"AUW624\"\n auth_passwordreset_reset(\"AUW624\", \"Valispass12\") \n \n #test to see if password updated\n assert new_user_password == \"Valispass12\"\n #this sequence should successfully reset the password\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
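The match argument of pytest.raises is applied with re.search, so a pattern like '*Incorrect Reset Code*' is not a valid regular expression (a leading '*' has nothing to repeat) and would raise re.error rather than match; a plain substring is enough. A minimal sketch, using a stand-in reset helper that is not part of the original module:

import pytest

def reset(code, new_password):
    # stand-in for auth_passwordreset_reset, used only to illustrate the match= pattern
    if code != "AUW624":
        raise ValueError("Incorrect Reset Code")
    if len(new_password) < 5:
        raise ValueError("Invalid Password Length")

def test_incorrect_reset_code():
    # match= is searched as a regex against str(exception); plain substrings work as-is
    with pytest.raises(ValueError, match="Incorrect Reset Code"):
        reset("ABS124", "SomePass")
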
planet_list = ["Mercury", "Mars"]
planet_list.append("Jupiter")
planet_list.append("Saturn")
planet_list.extend(["Uranus", "Neptune"])
planet_list.insert(1, "Earth")
planet_list.insert(1, "Venus")
planet_list.append("Pluto")
del planet_list[-1]
print(planet_list)
|
normal
|
{
"blob_id": "1280ab66b817011e22e560a78104bbc4340989e7",
"index": 8495,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nplanet_list.append('Jupiter')\nplanet_list.append('Saturn')\nplanet_list.extend(['Uranus', 'Neptune'])\nplanet_list.insert(1, 'Earth')\nplanet_list.insert(1, 'Venus')\nplanet_list.append('Pluto')\ndel planet_list[-1]\nprint(planet_list)\n",
"step-3": "planet_list = ['Mercury', 'Mars']\nplanet_list.append('Jupiter')\nplanet_list.append('Saturn')\nplanet_list.extend(['Uranus', 'Neptune'])\nplanet_list.insert(1, 'Earth')\nplanet_list.insert(1, 'Venus')\nplanet_list.append('Pluto')\ndel planet_list[-1]\nprint(planet_list)\n",
"step-4": "planet_list = [\"Mercury\", \"Mars\"]\n\nplanet_list.append(\"Jupiter\")\nplanet_list.append(\"Saturn\")\nplanet_list.extend([\"Uranus\", \"Neptune\"])\nplanet_list.insert(1, \"Earth\")\nplanet_list.insert(1, \"Venus\")\nplanet_list.append(\"Pluto\")\ndel planet_list[-1]\n\nprint(planet_list)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
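del planet_list[-1] and planet_list.pop() both drop the last element; pop() also returns it, which helps when the removed value is still needed. A small sketch:

planets = ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus", "Neptune", "Pluto"]
demoted = planets.pop()   # removes and returns "Pluto"
print(demoted, planets)
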
#This is a file from CS50 Finance
from functools import wraps
from flask import redirect, render_template, session
from threading import Thread
from flask_mail import Message
from application import app, mail
ALLOWED_EXTENSIONS = {"png", "PNG", "jpg", "jpeg", "JPG", "JPEG"}
def login_required(f):
"""
Decorate routes to require login.
http://flask.pocoo.org/docs/1.0/patterns/viewdecorators/
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if session.get("user_id") is None:
return redirect("/sign_in")
return f(*args, **kwargs)
return decorated_function
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
# Send message function
def async_send_mail(applic, msg):
with applic.app_context():
mail.send(msg)
def send_mail(subject, recipient, template, **kwargs):
msg = Message(subject, recipients=[recipient])
msg.html = render_template(template, **kwargs)
thr = Thread(target=async_send_mail, args=[app, msg])
thr.start()
return thr
|
normal
|
{
"blob_id": "1a4da621add157fa6d1f578370d64594b102eeb5",
"index": 4245,
"step-1": "<mask token>\n\n\ndef login_required(f):\n \"\"\"\n Decorate routes to require login.\n\n http://flask.pocoo.org/docs/1.0/patterns/viewdecorators/\n \"\"\"\n\n @wraps(f)\n def decorated_function(*args, **kwargs):\n if session.get('user_id') is None:\n return redirect('/sign_in')\n return f(*args, **kwargs)\n return decorated_function\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS\n\n\n<mask token>\n\n\ndef send_mail(subject, recipient, template, **kwargs):\n msg = Message(subject, recipients=[recipient])\n msg.html = render_template(template, **kwargs)\n thr = Thread(target=async_send_mail, args=[app, msg])\n thr.start()\n return thr\n",
"step-2": "<mask token>\n\n\ndef login_required(f):\n \"\"\"\n Decorate routes to require login.\n\n http://flask.pocoo.org/docs/1.0/patterns/viewdecorators/\n \"\"\"\n\n @wraps(f)\n def decorated_function(*args, **kwargs):\n if session.get('user_id') is None:\n return redirect('/sign_in')\n return f(*args, **kwargs)\n return decorated_function\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS\n\n\ndef async_send_mail(applic, msg):\n with applic.app_context():\n mail.send(msg)\n\n\ndef send_mail(subject, recipient, template, **kwargs):\n msg = Message(subject, recipients=[recipient])\n msg.html = render_template(template, **kwargs)\n thr = Thread(target=async_send_mail, args=[app, msg])\n thr.start()\n return thr\n",
"step-3": "<mask token>\nALLOWED_EXTENSIONS = {'png', 'PNG', 'jpg', 'jpeg', 'JPG', 'JPEG'}\n\n\ndef login_required(f):\n \"\"\"\n Decorate routes to require login.\n\n http://flask.pocoo.org/docs/1.0/patterns/viewdecorators/\n \"\"\"\n\n @wraps(f)\n def decorated_function(*args, **kwargs):\n if session.get('user_id') is None:\n return redirect('/sign_in')\n return f(*args, **kwargs)\n return decorated_function\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS\n\n\ndef async_send_mail(applic, msg):\n with applic.app_context():\n mail.send(msg)\n\n\ndef send_mail(subject, recipient, template, **kwargs):\n msg = Message(subject, recipients=[recipient])\n msg.html = render_template(template, **kwargs)\n thr = Thread(target=async_send_mail, args=[app, msg])\n thr.start()\n return thr\n",
"step-4": "from functools import wraps\nfrom flask import redirect, render_template, session\nfrom threading import Thread\nfrom flask_mail import Message\nfrom application import app, mail\nALLOWED_EXTENSIONS = {'png', 'PNG', 'jpg', 'jpeg', 'JPG', 'JPEG'}\n\n\ndef login_required(f):\n \"\"\"\n Decorate routes to require login.\n\n http://flask.pocoo.org/docs/1.0/patterns/viewdecorators/\n \"\"\"\n\n @wraps(f)\n def decorated_function(*args, **kwargs):\n if session.get('user_id') is None:\n return redirect('/sign_in')\n return f(*args, **kwargs)\n return decorated_function\n\n\ndef allowed_file(filename):\n return '.' in filename and filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS\n\n\ndef async_send_mail(applic, msg):\n with applic.app_context():\n mail.send(msg)\n\n\ndef send_mail(subject, recipient, template, **kwargs):\n msg = Message(subject, recipients=[recipient])\n msg.html = render_template(template, **kwargs)\n thr = Thread(target=async_send_mail, args=[app, msg])\n thr.start()\n return thr\n",
"step-5": "#This is a file from CS50 Finance\nfrom functools import wraps\n\nfrom flask import redirect, render_template, session\nfrom threading import Thread\nfrom flask_mail import Message\nfrom application import app, mail\n\nALLOWED_EXTENSIONS = {\"png\", \"PNG\", \"jpg\", \"jpeg\", \"JPG\", \"JPEG\"}\n\ndef login_required(f):\n \"\"\"\n Decorate routes to require login.\n\n http://flask.pocoo.org/docs/1.0/patterns/viewdecorators/\n \"\"\"\n @wraps(f)\n def decorated_function(*args, **kwargs):\n if session.get(\"user_id\") is None:\n return redirect(\"/sign_in\")\n return f(*args, **kwargs)\n return decorated_function\n\ndef allowed_file(filename):\n return '.' in filename and \\\n filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS\n\n# Send message function\ndef async_send_mail(applic, msg):\n with applic.app_context():\n mail.send(msg)\n\ndef send_mail(subject, recipient, template, **kwargs):\n msg = Message(subject, recipients=[recipient])\n msg.html = render_template(template, **kwargs)\n thr = Thread(target=async_send_mail, args=[app, msg])\n thr.start()\n return thr",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
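A minimal sketch of how the login_required decorator and send_mail helper above are typically wired into a route; the module name helpers, the route, the template path, and the recipient address are placeholders rather than part of the original app:

from application import app                     # the same Flask app the helpers import
from helpers import login_required, send_mail   # assumed module name for the code above

@app.route("/portfolio")
@login_required                                 # redirects to /sign_in when session has no user_id
def portfolio():
    # send the mail on a background thread so the request is not blocked
    send_mail("Welcome", "[email protected]", "email/welcome.html", name="someone")
    return "portfolio page"
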
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def n(T):
k = 1.38065e-23
c = 300000000.0
N = 100
a = h * c / (lam2 * k * T)
b = h * c / (lam1 * k * T)
x, w = gaussxwab(N, a, b)
s = 0.0
for k in range(N):
s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))
s = s * (15 / (pi * pi * pi * pi))
return s
if part in ['a'] or ['b']:
lol = linspace(0, 7000, 7000)
for i in range(len(T)):
print('i =', i)
lol = n(T[i])
plot(T[i], lol, 'k-')
show()
if part in ['b']:
z = (1 + sqrt(5)) / 2
accuracy = 1e-06
x1 = 1 / 10
x4 = 1 * 10
x2 = x4 - (x4 - x1) / z
x3 = x1 + (x4 - x1) / z
f1 = n(x1)
f2 = n(x2)
f3 = n(x3)
f4 = n(x4)
while x4 - x1 > accuracy:
if f2 < f3:
x4, f4 = x3, f3
x3, f3 = x2, f2
x2 = x4 - (x4 - x1) / z
f2 = n(x2)
else:
x1, f1 = x2, f2
x2, f2 = x3, f3
x3 = x1 - (x4 - x1) / z
f3 = n(x3)
print('minimum falls at', 0.5 * (x1 + x4), 'K')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
k = 1.38065e-23
h = 6.626e-34
lam1 = 3.9e-07
lam2 = 7.5e-07
c = 300000000.0
T = linspace(300, 10000, 7000)
part = str(input('what part would you like to do? (a, b, or c) '))
def n(T):
k = 1.38065e-23
c = 300000000.0
N = 100
a = h * c / (lam2 * k * T)
b = h * c / (lam1 * k * T)
x, w = gaussxwab(N, a, b)
s = 0.0
for k in range(N):
s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))
s = s * (15 / (pi * pi * pi * pi))
return s
if part in ['a'] or ['b']:
lol = linspace(0, 7000, 7000)
for i in range(len(T)):
print('i =', i)
lol = n(T[i])
plot(T[i], lol, 'k-')
show()
if part in ['b']:
z = (1 + sqrt(5)) / 2
accuracy = 1e-06
x1 = 1 / 10
x4 = 1 * 10
x2 = x4 - (x4 - x1) / z
x3 = x1 + (x4 - x1) / z
f1 = n(x1)
f2 = n(x2)
f3 = n(x3)
f4 = n(x4)
while x4 - x1 > accuracy:
if f2 < f3:
x4, f4 = x3, f3
x3, f3 = x2, f2
x2 = x4 - (x4 - x1) / z
f2 = n(x2)
else:
x1, f1 = x2, f2
x2, f2 = x3, f3
x3 = x1 - (x4 - x1) / z
f3 = n(x3)
print('minimum falls at', 0.5 * (x1 + x4), 'K')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
from gaussxw import gaussxwab
from numpy import linspace, arange
from pylab import plot, show, xlabel, ylabel
from math import pi, exp, sqrt
k = 1.38065e-23
h = 6.626e-34
lam1 = 3.9e-07
lam2 = 7.5e-07
c = 300000000.0
T = linspace(300, 10000, 7000)
part = str(input('what part would you like to do? (a, b, or c) '))
def n(T):
k = 1.38065e-23
c = 300000000.0
N = 100
a = h * c / (lam2 * k * T)
b = h * c / (lam1 * k * T)
x, w = gaussxwab(N, a, b)
s = 0.0
for k in range(N):
s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))
s = s * (15 / (pi * pi * pi * pi))
return s
if part in ['a'] or ['b']:
lol = linspace(0, 7000, 7000)
for i in range(len(T)):
print('i =', i)
lol = n(T[i])
plot(T[i], lol, 'k-')
show()
if part in ['b']:
z = (1 + sqrt(5)) / 2
accuracy = 1e-06
x1 = 1 / 10
x4 = 1 * 10
x2 = x4 - (x4 - x1) / z
x3 = x1 + (x4 - x1) / z
f1 = n(x1)
f2 = n(x2)
f3 = n(x3)
f4 = n(x4)
while x4 - x1 > accuracy:
if f2 < f3:
x4, f4 = x3, f3
x3, f3 = x2, f2
x2 = x4 - (x4 - x1) / z
f2 = n(x2)
else:
x1, f1 = x2, f2
x2, f2 = x3, f3
x3 = x1 - (x4 - x1) / z
f3 = n(x3)
print('minimum falls at', 0.5 * (x1 + x4), 'K')
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 29 15:10:34 2018
@author: nit_n
"""
from gaussxw import gaussxwab
from numpy import linspace, arange
from pylab import plot, show, xlabel, ylabel
from math import pi, exp, sqrt
k = 1.38065e-23 # joules/kelvin
h = 6.626e-34 # joules
lam1 = 390e-9 # meters
lam2 = 750e-9 # meters
c = 3e8 # meters/second
T = linspace(300, 10000, 7000)
part = str(input("what part would you like to do? (a, b, or c) "))
def n(T):
k = 1.38065e-23 # joules/kelvin
c = 3e8 # meters/second
N = 100
a = h*c/(lam2*k*T)
b = h*c/(lam1*k*T)
x,w = gaussxwab(N,a,b)
s = 0.0
for k in range(N):
s += w[k]*(x[k]**3/(exp(x[k])-1))
s = s*(15/(pi*pi*pi*pi))
return s
if part in ['a'] or ['b']:
lol = linspace(0, 7000, 7000)
for i in range(len(T)):
print("i =",i)
lol = n(T[i])
plot(T[i], lol, 'k-')
show()
if part in ['b']:
z = (1 + sqrt(5))/2
accuracy = 1e-6
x1 = 1/10
x4 = 1*10
x2 = x4 - (x4 - x1)/z
x3 = x1 + (x4 - x1)/z
f1 = n(x1)
f2 = n(x2)
f3 = n(x3)
f4 = n(x4)
while x4-x1>accuracy:
if f2<f3:
x4,f4 = x3,f3
x3,f3 = x2,f2
x2 = x4 - (x4-x1)/z
f2 = n(x2)
else:
x1,f1 = x2,f2
x2,f2 = x3,f3
x3 = x1 - (x4-x1)/z
f3 = n(x3)
print("minimum falls at", 0.5*(x1+x4),"K")
|
flexible
|
{
"blob_id": "9b88a3976d522bdfd38502e29eefc1f1a0c29ed2",
"index": 2884,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef n(T):\n k = 1.38065e-23\n c = 300000000.0\n N = 100\n a = h * c / (lam2 * k * T)\n b = h * c / (lam1 * k * T)\n x, w = gaussxwab(N, a, b)\n s = 0.0\n for k in range(N):\n s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))\n s = s * (15 / (pi * pi * pi * pi))\n return s\n\n\nif part in ['a'] or ['b']:\n lol = linspace(0, 7000, 7000)\n for i in range(len(T)):\n print('i =', i)\n lol = n(T[i])\n plot(T[i], lol, 'k-')\n show()\nif part in ['b']:\n z = (1 + sqrt(5)) / 2\n accuracy = 1e-06\n x1 = 1 / 10\n x4 = 1 * 10\n x2 = x4 - (x4 - x1) / z\n x3 = x1 + (x4 - x1) / z\n f1 = n(x1)\n f2 = n(x2)\n f3 = n(x3)\n f4 = n(x4)\n while x4 - x1 > accuracy:\n if f2 < f3:\n x4, f4 = x3, f3\n x3, f3 = x2, f2\n x2 = x4 - (x4 - x1) / z\n f2 = n(x2)\n else:\n x1, f1 = x2, f2\n x2, f2 = x3, f3\n x3 = x1 - (x4 - x1) / z\n f3 = n(x3)\n print('minimum falls at', 0.5 * (x1 + x4), 'K')\n",
"step-3": "<mask token>\nk = 1.38065e-23\nh = 6.626e-34\nlam1 = 3.9e-07\nlam2 = 7.5e-07\nc = 300000000.0\nT = linspace(300, 10000, 7000)\npart = str(input('what part would you like to do? (a, b, or c) '))\n\n\ndef n(T):\n k = 1.38065e-23\n c = 300000000.0\n N = 100\n a = h * c / (lam2 * k * T)\n b = h * c / (lam1 * k * T)\n x, w = gaussxwab(N, a, b)\n s = 0.0\n for k in range(N):\n s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))\n s = s * (15 / (pi * pi * pi * pi))\n return s\n\n\nif part in ['a'] or ['b']:\n lol = linspace(0, 7000, 7000)\n for i in range(len(T)):\n print('i =', i)\n lol = n(T[i])\n plot(T[i], lol, 'k-')\n show()\nif part in ['b']:\n z = (1 + sqrt(5)) / 2\n accuracy = 1e-06\n x1 = 1 / 10\n x4 = 1 * 10\n x2 = x4 - (x4 - x1) / z\n x3 = x1 + (x4 - x1) / z\n f1 = n(x1)\n f2 = n(x2)\n f3 = n(x3)\n f4 = n(x4)\n while x4 - x1 > accuracy:\n if f2 < f3:\n x4, f4 = x3, f3\n x3, f3 = x2, f2\n x2 = x4 - (x4 - x1) / z\n f2 = n(x2)\n else:\n x1, f1 = x2, f2\n x2, f2 = x3, f3\n x3 = x1 - (x4 - x1) / z\n f3 = n(x3)\n print('minimum falls at', 0.5 * (x1 + x4), 'K')\n",
"step-4": "<mask token>\nfrom gaussxw import gaussxwab\nfrom numpy import linspace, arange\nfrom pylab import plot, show, xlabel, ylabel\nfrom math import pi, exp, sqrt\nk = 1.38065e-23\nh = 6.626e-34\nlam1 = 3.9e-07\nlam2 = 7.5e-07\nc = 300000000.0\nT = linspace(300, 10000, 7000)\npart = str(input('what part would you like to do? (a, b, or c) '))\n\n\ndef n(T):\n k = 1.38065e-23\n c = 300000000.0\n N = 100\n a = h * c / (lam2 * k * T)\n b = h * c / (lam1 * k * T)\n x, w = gaussxwab(N, a, b)\n s = 0.0\n for k in range(N):\n s += w[k] * (x[k] ** 3 / (exp(x[k]) - 1))\n s = s * (15 / (pi * pi * pi * pi))\n return s\n\n\nif part in ['a'] or ['b']:\n lol = linspace(0, 7000, 7000)\n for i in range(len(T)):\n print('i =', i)\n lol = n(T[i])\n plot(T[i], lol, 'k-')\n show()\nif part in ['b']:\n z = (1 + sqrt(5)) / 2\n accuracy = 1e-06\n x1 = 1 / 10\n x4 = 1 * 10\n x2 = x4 - (x4 - x1) / z\n x3 = x1 + (x4 - x1) / z\n f1 = n(x1)\n f2 = n(x2)\n f3 = n(x3)\n f4 = n(x4)\n while x4 - x1 > accuracy:\n if f2 < f3:\n x4, f4 = x3, f3\n x3, f3 = x2, f2\n x2 = x4 - (x4 - x1) / z\n f2 = n(x2)\n else:\n x1, f1 = x2, f2\n x2, f2 = x3, f3\n x3 = x1 - (x4 - x1) / z\n f3 = n(x3)\n print('minimum falls at', 0.5 * (x1 + x4), 'K')\n",
"step-5": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Apr 29 15:10:34 2018\r\n\r\n@author: nit_n\r\n\"\"\"\r\n\r\nfrom gaussxw import gaussxwab\r\nfrom numpy import linspace, arange\r\nfrom pylab import plot, show, xlabel, ylabel\r\nfrom math import pi, exp, sqrt\r\n\r\nk = 1.38065e-23 # joules/kelvin\r\nh = 6.626e-34 # joules\r\nlam1 = 390e-9 # meters\r\nlam2 = 750e-9 # meters\r\nc = 3e8 # meters/second\r\n\r\nT = linspace(300, 10000, 7000)\r\n\r\npart = str(input(\"what part would you like to do? (a, b, or c) \"))\r\n\r\ndef n(T):\r\n \r\n k = 1.38065e-23 # joules/kelvin\r\n c = 3e8 # meters/second\r\n \r\n N = 100\r\n a = h*c/(lam2*k*T)\r\n b = h*c/(lam1*k*T)\r\n x,w = gaussxwab(N,a,b)\r\n s = 0.0\r\n \r\n for k in range(N):\r\n s += w[k]*(x[k]**3/(exp(x[k])-1))\r\n \r\n s = s*(15/(pi*pi*pi*pi))\r\n return s\r\n\r\nif part in ['a'] or ['b']:\r\n lol = linspace(0, 7000, 7000)\r\n for i in range(len(T)):\r\n print(\"i =\",i)\r\n lol = n(T[i])\r\n plot(T[i], lol, 'k-')\r\n show()\r\n\r\nif part in ['b']:\r\n z = (1 + sqrt(5))/2\r\n accuracy = 1e-6\r\n x1 = 1/10\r\n x4 = 1*10\r\n x2 = x4 - (x4 - x1)/z\r\n x3 = x1 + (x4 - x1)/z\r\n \r\n f1 = n(x1)\r\n f2 = n(x2)\r\n f3 = n(x3)\r\n f4 = n(x4)\r\n \r\n while x4-x1>accuracy:\r\n if f2<f3:\r\n x4,f4 = x3,f3\r\n x3,f3 = x2,f2\r\n x2 = x4 - (x4-x1)/z\r\n f2 = n(x2)\r\n else:\r\n x1,f1 = x2,f2\r\n x2,f2 = x3,f3\r\n x3 = x1 - (x4-x1)/z\r\n f3 = n(x3)\r\n\r\n print(\"minimum falls at\", 0.5*(x1+x4),\"K\") \r\n \r\n \r\n ",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
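Two details in the golden-section part above are worth noting: part in ['a'] or ['b'] is always truthy, because a non-empty list is true on its own (the membership test would be part in ['a', 'b']), and the update for the upper interior point is x1 + (x4 - x1)/z, matching the initialization, not minus. A compact sketch of the standard search with a generic objective; the helper name and the example function are illustrative only:

from math import sqrt

def golden_minimize(f, a, b, tol=1e-6):
    z = (1 + sqrt(5)) / 2
    x1, x4 = a, b
    x2 = x4 - (x4 - x1) / z
    x3 = x1 + (x4 - x1) / z
    f2, f3 = f(x2), f(x3)
    while x4 - x1 > tol:
        if f2 < f3:                      # minimum lies in [x1, x3]
            x4, x3, f3 = x3, x2, f2
            x2 = x4 - (x4 - x1) / z
            f2 = f(x2)
        else:                            # minimum lies in [x2, x4]
            x1, x2, f2 = x2, x3, f3
            x3 = x1 + (x4 - x1) / z
            f3 = f(x3)
    return 0.5 * (x1 + x4)

print(golden_minimize(lambda x: (x - 2.0) ** 2, 0.0, 5.0))   # ~2.0
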
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-03 19:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mybus', '0007_auto_20160104_0053'),
]
operations = [
migrations.RemoveField(
model_name='businfo',
name='description',
),
migrations.AlterField(
model_name='businfo',
name='title',
field=models.CharField(max_length=255, verbose_name=b'Bus Info'),
),
migrations.AlterField(
model_name='businfo',
name='url',
field=models.CharField(max_length=255, verbose_name=b'Bus No'),
),
]
|
normal
|
{
"blob_id": "1dec7a997b0bef3226fb17e4039b053c7a2e457e",
"index": 9045,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('mybus', '0007_auto_20160104_0053')]\n operations = [migrations.RemoveField(model_name='businfo', name=\n 'description'), migrations.AlterField(model_name='businfo', name=\n 'title', field=models.CharField(max_length=255, verbose_name=\n b'Bus Info')), migrations.AlterField(model_name='businfo', name=\n 'url', field=models.CharField(max_length=255, verbose_name=b'Bus No'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('mybus', '0007_auto_20160104_0053')]\n operations = [migrations.RemoveField(model_name='businfo', name=\n 'description'), migrations.AlterField(model_name='businfo', name=\n 'title', field=models.CharField(max_length=255, verbose_name=\n b'Bus Info')), migrations.AlterField(model_name='businfo', name=\n 'url', field=models.CharField(max_length=255, verbose_name=b'Bus No'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.1 on 2016-01-03 19:28\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('mybus', '0007_auto_20160104_0053'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='businfo',\n name='description',\n ),\n migrations.AlterField(\n model_name='businfo',\n name='title',\n field=models.CharField(max_length=255, verbose_name=b'Bus Info'),\n ),\n migrations.AlterField(\n model_name='businfo',\n name='url',\n field=models.CharField(max_length=255, verbose_name=b'Bus No'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
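The b'Bus Info' and b'Bus No' verbose names are bytes literals, as Django's makemigrations typically emitted on Python 2 codebases; on Python 3 the equivalent field definition uses a plain string, e.g.:

field = models.CharField(max_length=255, verbose_name='Bus Info')
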
import pickle
from pathlib import Path
from rich.console import Console
from fourierdb import FourierDocument, FourierCollection, FourierDB
console = Console()
doc = FourierDocument({"bar": "eggs", "xyz": "spam"})
doc2 = FourierDocument({"a": "foo", "b": "bar"})
doc3 = FourierDocument({"abc": "xyz"})
doc4 = FourierDocument({1: 2, 3: 4, 5: 6})
doc5 = FourierDocument({"hello": [1, 2, 3, 4, 5, 6, 7, 8, 9]})
FOURIER_DIR = Path.home() / ".fourier"
FOURIER_LOGS = FOURIER_DIR / "logs"
FOURIER_DBS = FOURIER_DIR / "databases"
coll = FourierCollection("coll", doc, doc2)
coll2 = FourierCollection("coll2", doc3, doc4, doc5)
db = FourierDB("db")
db.add_collection(coll)
db.add_collection(coll2)
pickle.dump(db, open(""))
|
normal
|
{
"blob_id": "f15f96658130ac9bba748a518371ad80d9772fbc",
"index": 4121,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ndb.add_collection(coll)\ndb.add_collection(coll2)\npickle.dump(db, open(''))\n",
"step-3": "<mask token>\nconsole = Console()\ndoc = FourierDocument({'bar': 'eggs', 'xyz': 'spam'})\ndoc2 = FourierDocument({'a': 'foo', 'b': 'bar'})\ndoc3 = FourierDocument({'abc': 'xyz'})\ndoc4 = FourierDocument({(1): 2, (3): 4, (5): 6})\ndoc5 = FourierDocument({'hello': [1, 2, 3, 4, 5, 6, 7, 8, 9]})\nFOURIER_DIR = Path.home() / '.fourier'\nFOURIER_LOGS = FOURIER_DIR / 'logs'\nFOURIER_DBS = FOURIER_DIR / 'databases'\ncoll = FourierCollection('coll', doc, doc2)\ncoll2 = FourierCollection('coll2', doc3, doc4, doc5)\ndb = FourierDB('db')\ndb.add_collection(coll)\ndb.add_collection(coll2)\npickle.dump(db, open(''))\n",
"step-4": "import pickle\nfrom pathlib import Path\nfrom rich.console import Console\nfrom fourierdb import FourierDocument, FourierCollection, FourierDB\nconsole = Console()\ndoc = FourierDocument({'bar': 'eggs', 'xyz': 'spam'})\ndoc2 = FourierDocument({'a': 'foo', 'b': 'bar'})\ndoc3 = FourierDocument({'abc': 'xyz'})\ndoc4 = FourierDocument({(1): 2, (3): 4, (5): 6})\ndoc5 = FourierDocument({'hello': [1, 2, 3, 4, 5, 6, 7, 8, 9]})\nFOURIER_DIR = Path.home() / '.fourier'\nFOURIER_LOGS = FOURIER_DIR / 'logs'\nFOURIER_DBS = FOURIER_DIR / 'databases'\ncoll = FourierCollection('coll', doc, doc2)\ncoll2 = FourierCollection('coll2', doc3, doc4, doc5)\ndb = FourierDB('db')\ndb.add_collection(coll)\ndb.add_collection(coll2)\npickle.dump(db, open(''))\n",
"step-5": "import pickle\nfrom pathlib import Path\nfrom rich.console import Console\nfrom fourierdb import FourierDocument, FourierCollection, FourierDB\n\nconsole = Console()\n\ndoc = FourierDocument({\"bar\": \"eggs\", \"xyz\": \"spam\"})\ndoc2 = FourierDocument({\"a\": \"foo\", \"b\": \"bar\"})\ndoc3 = FourierDocument({\"abc\": \"xyz\"})\ndoc4 = FourierDocument({1: 2, 3: 4, 5: 6})\ndoc5 = FourierDocument({\"hello\": [1, 2, 3, 4, 5, 6, 7, 8, 9]})\nFOURIER_DIR = Path.home() / \".fourier\"\nFOURIER_LOGS = FOURIER_DIR / \"logs\"\nFOURIER_DBS = FOURIER_DIR / \"databases\"\ncoll = FourierCollection(\"coll\", doc, doc2)\ncoll2 = FourierCollection(\"coll2\", doc3, doc4, doc5)\n\ndb = FourierDB(\"db\")\n\ndb.add_collection(coll)\ndb.add_collection(coll2)\n\npickle.dump(db, open(\"\"))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
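pickle.dump needs a file object opened for writing in binary mode, so open("") with no path and the default read/text mode cannot work; a minimal round-trip sketch, where the directory reuses FOURIER_DBS from the snippet and the .pickle file name is an assumption:

import pickle
from pathlib import Path

FOURIER_DBS = Path.home() / ".fourier" / "databases"

def save_db(db, name="db"):
    FOURIER_DBS.mkdir(parents=True, exist_ok=True)
    with open(FOURIER_DBS / f"{name}.pickle", "wb") as fh:   # "wb": pickle writes bytes
        pickle.dump(db, fh)                                  # file name is a placeholder

def load_db(name="db"):
    with open(FOURIER_DBS / f"{name}.pickle", "rb") as fh:
        return pickle.load(fh)
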
<|reserved_special_token_0|>
def get_summary():
model = T5ForConditionalGeneration.from_pretrained('t5-small')
tokenizer = T5Tokenizer.from_pretrained('t5-small')
device = torch.device('cpu')
text = str(url_display1.get('1.0', tk.END))
preprocess_text = text.strip().replace('\n', '')
t5_prepared_Text = 'summarize: ' + preprocess_text
tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'
).to(device)
summary_ids = model.generate(tokenized_text, num_beams=4,
no_repeat_ngram_size=2, min_length=30, max_length=100,
early_stopping=True)
output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
Str1 = text
str2 = output
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
edited = len(text) - len(output)
Precision = (len(text) + len(output) + edited) / 2
Precisioncalc = Precision / 100
result = ('\n\nSummarized text: \n', output
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab2_display_text.insert(tk.END, result)
def open_pdf():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display.insert(3.0, io)
def open_pdf1():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display1.insert(3.0, io)
<|reserved_special_token_0|>
def clear_url_entry():
url_entry.delete(0, END)
<|reserved_special_token_0|>
def get_text():
raw_text = str(url_entry.get())
page = urlopen(raw_text)
soup = BeautifulSoup(page)
fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))
url_display.insert(tk.END, fetched_text)
def get_url_summary():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
result = '\nSummary:{}'.format(final_text)
tab3_display_text.insert(tk.END, result)
def use_spacy():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = text_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nSpacy Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
def use_nltk():
raw_text = url_display.get('1.0', tk.END)
final_text = nltk_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = nltk_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nNLTK Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def get_summary():
model = T5ForConditionalGeneration.from_pretrained('t5-small')
tokenizer = T5Tokenizer.from_pretrained('t5-small')
device = torch.device('cpu')
text = str(url_display1.get('1.0', tk.END))
preprocess_text = text.strip().replace('\n', '')
t5_prepared_Text = 'summarize: ' + preprocess_text
tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'
).to(device)
summary_ids = model.generate(tokenized_text, num_beams=4,
no_repeat_ngram_size=2, min_length=30, max_length=100,
early_stopping=True)
output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
Str1 = text
str2 = output
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
edited = len(text) - len(output)
Precision = (len(text) + len(output) + edited) / 2
Precisioncalc = Precision / 100
result = ('\n\nSummarized text: \n', output
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab2_display_text.insert(tk.END, result)
def open_pdf():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display.insert(3.0, io)
def open_pdf1():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display1.insert(3.0, io)
def clear_display_result():
tab3_display_text.delete('1.0', END)
def clear_url_entry():
url_entry.delete(0, END)
<|reserved_special_token_0|>
def get_text():
raw_text = str(url_entry.get())
page = urlopen(raw_text)
soup = BeautifulSoup(page)
fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))
url_display.insert(tk.END, fetched_text)
def get_url_summary():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
result = '\nSummary:{}'.format(final_text)
tab3_display_text.insert(tk.END, result)
def use_spacy():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = text_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nSpacy Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
def use_nltk():
raw_text = url_display.get('1.0', tk.END)
final_text = nltk_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = nltk_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nNLTK Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
window = Tk()
window.title('Summaryzer GUI')
window.geometry('700x400')
window.config(background='black')
style = ttk.Style(window)
style.configure('lefttab.TNotebook', tabposition='wn')
tab_control = ttk.Notebook(window, style='lefttab.TNotebook')
tab2 = ttk.Frame(tab_control)
tab3 = ttk.Frame(tab_control)
tab_control.add(tab3, text=f"{'Extractive':^20s}")
tab_control.add(tab2, text=f"{'Abstractive':^20s}")
label1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)
label1.grid(column=1, row=0)
label2 = Label(tab2, text='Abstractive Summrize', padx=5, pady=5)
label2.grid(column=0, row=0)
tab_control.pack(expand=1, fill='both')
def get_summary():
model = T5ForConditionalGeneration.from_pretrained('t5-small')
tokenizer = T5Tokenizer.from_pretrained('t5-small')
device = torch.device('cpu')
text = str(url_display1.get('1.0', tk.END))
preprocess_text = text.strip().replace('\n', '')
t5_prepared_Text = 'summarize: ' + preprocess_text
tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'
).to(device)
summary_ids = model.generate(tokenized_text, num_beams=4,
no_repeat_ngram_size=2, min_length=30, max_length=100,
early_stopping=True)
output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
Str1 = text
str2 = output
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
edited = len(text) - len(output)
Precision = (len(text) + len(output) + edited) / 2
Precisioncalc = Precision / 100
result = ('\n\nSummarized text: \n', output
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab2_display_text.insert(tk.END, result)
def open_pdf():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display.insert(3.0, io)
def open_pdf1():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display1.insert(3.0, io)
def clear_display_result():
tab3_display_text.delete('1.0', END)
def clear_url_entry():
url_entry.delete(0, END)
def openfiles():
file1 = tkinter.filedialog.askopenfilename(filetypes=(('Text Files',
'.txt'), ('All files', '*')))
read_text = open(file1).read()
url_display.insert(tk.END, read_text)
def get_text():
raw_text = str(url_entry.get())
page = urlopen(raw_text)
soup = BeautifulSoup(page)
fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))
url_display.insert(tk.END, fetched_text)
def get_url_summary():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
result = '\nSummary:{}'.format(final_text)
tab3_display_text.insert(tk.END, result)
def use_spacy():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = text_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nSpacy Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
def use_nltk():
raw_text = url_display.get('1.0', tk.END)
final_text = nltk_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = nltk_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nNLTK Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
def use_gensim():
raw_text = url_display.get('1.0', tk.END)
final_text = summarize(raw_text)
print(final_text)
Str1 = raw_text
str2 = summarize(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nGensim Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
l1 = Label(tab3, text='Enter URL To Summarize')
l1.grid(row=1, column=0)
raw_entry = StringVar()
url_entry = Entry(tab3, textvariable=raw_entry, width=50)
url_entry.grid(row=1, column=1)
button1 = Button(tab3, text='Reset', command=clear_url_entry, width=12, bg=
'#03A9F4', fg='#fff')
button1.grid(row=4, column=0, padx=10, pady=10)
button2 = Button(tab3, text='Get Text', command=get_text, width=12, bg=
'#03A9F4', fg='#fff')
button2.grid(row=4, column=1, padx=10, pady=10)
button3 = Button(tab3, text='Open File', width=12, command=openfiles, bg=
'#c5cae9')
button3.grid(row=5, column=0, padx=10, pady=10)
button4 = Button(tab3, text='Open PDF', width=12, command=open_pdf, bg=
'#c5cae9')
button4.grid(row=5, column=1, padx=10, pady=10)
button5 = Button(tab3, text='SpaCy', command=use_spacy, width=12, bg='red',
fg='#fff')
button5.grid(row=8, column=0, padx=10, pady=10)
button6 = Button(tab3, text='Clear Result', command=clear_display_result,
width=12, bg='#03A9F4', fg='#fff')
button6.grid(row=9, column=1, padx=10, pady=10)
button7 = Button(tab3, text='NLTK', command=use_nltk, width=12, bg=
'#03A9F4', fg='#fff')
button7.grid(row=8, column=1, padx=10, pady=10)
button8 = Button(tab3, text='Gensim', command=use_gensim, width=12, bg=
'#03A9F4', fg='#fff')
button8.grid(row=9, column=0, padx=10, pady=10)
url_display = ScrolledText(tab3, height=10)
url_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab3_display_text = ScrolledText(tab3, height=10)
tab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
l1 = Label(tab2, text='Enter URL To Summarize')
l1.grid(row=1, column=0)
raw_entry1 = StringVar()
url_entry1 = Entry(tab2, textvariable=raw_entry, width=50)
url_entry1.grid(row=1, column=1)
button9 = Button(tab2, text='Reset', command=clear_url_entry, width=12, bg=
'#03A9F4', fg='#fff')
button9.grid(row=4, column=0, padx=10, pady=10)
button10 = Button(tab2, text='Get Text', command=get_text, width=12, bg=
'#03A9F4', fg='#fff')
button10.grid(row=4, column=1, padx=10, pady=10)
button11 = Button(tab2, text='Open File', width=12, command=openfiles, bg=
'#c5cae9')
button11.grid(row=5, column=0, padx=10, pady=10)
button12 = Button(tab2, text='Open PDF', width=12, command=open_pdf1, bg=
'#c5cae9')
button12.grid(row=5, column=1, padx=10, pady=10)
button13 = Button(tab2, text='Clear Result', command=clear_display_result,
width=12, bg='#03A9F4', fg='#fff')
button13.grid(row=9, column=1, padx=10, pady=10)
button14 = Button(tab2, text='Abstract', command=get_summary, width=12, bg=
'#03A9F4', fg='#fff')
button14.grid(row=9, column=0, padx=10, pady=10)
url_display1 = ScrolledText(tab2, height=10)
url_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab2_display_text = ScrolledText(tab2, height=10)
tab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
window.mainloop()
<|reserved_special_token_1|>
import difflib
import tkinter as tk
from tkinter import *
from tkinter import ttk
from tkinter.scrolledtext import *
import tkinter.filedialog
import PyPDF2
from tkinter import filedialog
import torch
import json
from transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config
from spacy_summarization import text_summarizer
from gensim.summarization import summarize
from nltk_summarization import nltk_summarizer
from bs4 import BeautifulSoup
from urllib.request import urlopen
window = Tk()
window.title('Summaryzer GUI')
window.geometry('700x400')
window.config(background='black')
style = ttk.Style(window)
style.configure('lefttab.TNotebook', tabposition='wn')
tab_control = ttk.Notebook(window, style='lefttab.TNotebook')
tab2 = ttk.Frame(tab_control)
tab3 = ttk.Frame(tab_control)
tab_control.add(tab3, text=f"{'Extractive':^20s}")
tab_control.add(tab2, text=f"{'Abstractive':^20s}")
label1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)
label1.grid(column=1, row=0)
label2 = Label(tab2, text='Abstractive Summrize', padx=5, pady=5)
label2.grid(column=0, row=0)
tab_control.pack(expand=1, fill='both')
def get_summary():
model = T5ForConditionalGeneration.from_pretrained('t5-small')
tokenizer = T5Tokenizer.from_pretrained('t5-small')
device = torch.device('cpu')
text = str(url_display1.get('1.0', tk.END))
preprocess_text = text.strip().replace('\n', '')
t5_prepared_Text = 'summarize: ' + preprocess_text
tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'
).to(device)
summary_ids = model.generate(tokenized_text, num_beams=4,
no_repeat_ngram_size=2, min_length=30, max_length=100,
early_stopping=True)
output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)
Str1 = text
str2 = output
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
edited = len(text) - len(output)
Precision = (len(text) + len(output) + edited) / 2
Precisioncalc = Precision / 100
result = ('\n\nSummarized text: \n', output
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab2_display_text.insert(tk.END, result)
def open_pdf():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display.insert(3.0, io)
def open_pdf1():
open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=
'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))
)
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display1.insert(3.0, io)
def clear_display_result():
tab3_display_text.delete('1.0', END)
def clear_url_entry():
url_entry.delete(0, END)
def openfiles():
file1 = tkinter.filedialog.askopenfilename(filetypes=(('Text Files',
'.txt'), ('All files', '*')))
read_text = open(file1).read()
url_display.insert(tk.END, read_text)
def get_text():
raw_text = str(url_entry.get())
page = urlopen(raw_text)
soup = BeautifulSoup(page)
fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))
url_display.insert(tk.END, fetched_text)
def get_url_summary():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
result = '\nSummary:{}'.format(final_text)
tab3_display_text.insert(tk.END, result)
def use_spacy():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = text_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nSpacy Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
def use_nltk():
raw_text = url_display.get('1.0', tk.END)
final_text = nltk_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = nltk_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nNLTK Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
def use_gensim():
raw_text = url_display.get('1.0', tk.END)
final_text = summarize(raw_text)
print(final_text)
Str1 = raw_text
str2 = summarize(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nGensim Summary:{}\n'.format(final_text
), ' Precision = ', Precisioncalc, ' similarity = ', printt
tab3_display_text.insert(tk.END, result)
l1 = Label(tab3, text='Enter URL To Summarize')
l1.grid(row=1, column=0)
raw_entry = StringVar()
url_entry = Entry(tab3, textvariable=raw_entry, width=50)
url_entry.grid(row=1, column=1)
button1 = Button(tab3, text='Reset', command=clear_url_entry, width=12, bg=
'#03A9F4', fg='#fff')
button1.grid(row=4, column=0, padx=10, pady=10)
button2 = Button(tab3, text='Get Text', command=get_text, width=12, bg=
'#03A9F4', fg='#fff')
button2.grid(row=4, column=1, padx=10, pady=10)
button3 = Button(tab3, text='Open File', width=12, command=openfiles, bg=
'#c5cae9')
button3.grid(row=5, column=0, padx=10, pady=10)
button4 = Button(tab3, text='Open PDF', width=12, command=open_pdf, bg=
'#c5cae9')
button4.grid(row=5, column=1, padx=10, pady=10)
button5 = Button(tab3, text='SpaCy', command=use_spacy, width=12, bg='red',
fg='#fff')
button5.grid(row=8, column=0, padx=10, pady=10)
button6 = Button(tab3, text='Clear Result', command=clear_display_result,
width=12, bg='#03A9F4', fg='#fff')
button6.grid(row=9, column=1, padx=10, pady=10)
button7 = Button(tab3, text='NLTK', command=use_nltk, width=12, bg=
'#03A9F4', fg='#fff')
button7.grid(row=8, column=1, padx=10, pady=10)
button8 = Button(tab3, text='Gensim', command=use_gensim, width=12, bg=
'#03A9F4', fg='#fff')
button8.grid(row=9, column=0, padx=10, pady=10)
url_display = ScrolledText(tab3, height=10)
url_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab3_display_text = ScrolledText(tab3, height=10)
tab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
l1 = Label(tab2, text='Enter URL To Summarize')
l1.grid(row=1, column=0)
raw_entry1 = StringVar()
url_entry1 = Entry(tab2, textvariable=raw_entry, width=50)
url_entry1.grid(row=1, column=1)
button9 = Button(tab2, text='Reset', command=clear_url_entry, width=12, bg=
'#03A9F4', fg='#fff')
button9.grid(row=4, column=0, padx=10, pady=10)
button10 = Button(tab2, text='Get Text', command=get_text, width=12, bg=
'#03A9F4', fg='#fff')
button10.grid(row=4, column=1, padx=10, pady=10)
button11 = Button(tab2, text='Open File', width=12, command=openfiles, bg=
'#c5cae9')
button11.grid(row=5, column=0, padx=10, pady=10)
button12 = Button(tab2, text='Open PDF', width=12, command=open_pdf1, bg=
'#c5cae9')
button12.grid(row=5, column=1, padx=10, pady=10)
button13 = Button(tab2, text='Clear Result', command=clear_display_result,
width=12, bg='#03A9F4', fg='#fff')
button13.grid(row=9, column=1, padx=10, pady=10)
button14 = Button(tab2, text='Abstract', command=get_summary, width=12, bg=
'#03A9F4', fg='#fff')
button14.grid(row=9, column=0, padx=10, pady=10)
url_display1 = ScrolledText(tab2, height=10)
url_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab2_display_text = ScrolledText(tab2, height=10)
tab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
window.mainloop()
<|reserved_special_token_1|>
# Core Packages
import difflib
import tkinter as tk
from tkinter import *
from tkinter import ttk
from tkinter.scrolledtext import *
import tkinter.filedialog
import PyPDF2
from tkinter import filedialog
import torch
import json
from transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config
# NLP Pkgs
from spacy_summarization import text_summarizer
from gensim.summarization import summarize
from nltk_summarization import nltk_summarizer
# Web Scraping Pkg
from bs4 import BeautifulSoup
from urllib.request import urlopen
# Structure and Layout
window = Tk()
window.title("Summaryzer GUI")
window.geometry("700x400")
window.config(background='black')
style = ttk.Style(window)
style.configure('lefttab.TNotebook', tabposition='wn', )
# TAB LAYOUT
tab_control = ttk.Notebook(window, style='lefttab.TNotebook')
tab2 = ttk.Frame(tab_control)
tab3 = ttk.Frame(tab_control)
# ADD TABS TO NOTEBOOK
tab_control.add(tab3, text=f'{"Extractive":^20s}')
tab_control.add(tab2, text=f'{"Abstractive":^20s}')
label1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)
label1.grid(column=1, row=0)
label2 = Label(tab2, text='Abstractive Summrize',padx=5, pady=5)
label2.grid(column=0, row=0)
tab_control.pack(expand=1, fill='both')
def get_summary():
model = T5ForConditionalGeneration.from_pretrained ('t5-small')
tokenizer = T5Tokenizer.from_pretrained ('t5-small')
device = torch.device ('cpu')
text = str(url_display1.get('1.0', tk.END))
preprocess_text = text.strip ().replace ("\n", "")
t5_prepared_Text = "summarize: " + preprocess_text
tokenized_text = tokenizer.encode (t5_prepared_Text, return_tensors="pt").to (device)
summary_ids = model.generate (tokenized_text,
num_beams=4,
no_repeat_ngram_size=2,
min_length=30,
max_length=100,
early_stopping=True)
output = tokenizer.decode (summary_ids[0], skip_special_tokens=True)
Str1 = text
str2 = output
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
edited = len(text)-len(output)
Precision = (len(text)+len(output)+edited)/2
Precisioncalc = Precision / 100
result =("\n\nSummarized text: \n", output)," Precision = " , Precisioncalc , " similarity = " , printt
tab2_display_text.insert(tk.END, result)
def open_pdf():
open_file = filedialog.askopenfilename(
initialdir="C:/gui/",
title="Open PDF File",
filetypes=(
("PDF Files", "*.pdf"),
("All Files", ".")))
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display.insert(3.0, io)
def open_pdf1():
open_file = filedialog.askopenfilename(
initialdir="C:/gui/",
title="Open PDF File",
filetypes=(
("PDF Files", "*.pdf"),
("All Files", ".")))
if open_file:
pdf_file = PyPDF2.PdfFileReader(open_file)
page = pdf_file.getPage(0)
page_stuff = page.extractText()
io = page_stuff.split()
url_display1.insert(3.0, io)
def clear_display_result():
tab3_display_text.delete('1.0', END)
# Clear For URL
def clear_url_entry():
url_entry.delete(0, END)
# Open File to Read and Process
def openfiles():
file1 = tkinter.filedialog.askopenfilename(filetypes=(("Text Files", ".txt"), ("All files", "*")))
read_text = open(file1).read()
url_display.insert(tk.END, read_text)
def get_text():
raw_text = str(url_entry.get())
page = urlopen(raw_text)
soup = BeautifulSoup(page)
fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))
url_display.insert(tk.END, fetched_text)
def get_url_summary():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
result = '\nSummary:{}'.format(final_text)
tab3_display_text.insert(tk.END, result)
def use_spacy ():
raw_text = url_display.get('1.0', tk.END)
final_text = text_summarizer(raw_text)
print(final_text)
Str1 = raw_text
str2 = text_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nSpacy Summary:{}\n'.format(final_text)," Precision = " , Precisioncalc , " similarity = " , printt
tab3_display_text.insert(tk.END, result)
def use_nltk():
raw_text = url_display.get ('1.0', tk.END)
final_text = nltk_summarizer (raw_text)
print (final_text)
Str1 = raw_text
str2 = nltk_summarizer(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result = '\nNLTK Summary:{}\n'.format(final_text)," Precision = " , Precisioncalc , " similarity = " , printt
tab3_display_text.insert(tk.END, result)
def use_gensim():
raw_text = url_display.get ('1.0', tk.END)
final_text = summarize(raw_text)
print (final_text)
Str1 = raw_text
str2 = summarize(raw_text)
printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100
Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)
Precisioncalc = Precision / 100
result ='\nGensim Summary:{}\n'.format(final_text)," Precision = " , Precisioncalc , " similarity = " , printt
tab3_display_text.insert(tk.END, result)
# URL TAB
l1 = Label(tab3, text="Enter URL To Summarize")
l1.grid(row=1, column=0)
raw_entry = StringVar()
url_entry = Entry(tab3, textvariable=raw_entry, width=50)
url_entry.grid(row=1, column=1)
# BUTTONS
button1 = Button(tab3, text="Reset", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')
button1.grid(row=4, column=0, padx=10, pady=10)
button2 = Button(tab3, text="Get Text", command=get_text, width=12, bg='#03A9F4', fg='#fff')
button2.grid(row=4, column=1, padx=10, pady=10)
button3 = Button(tab3, text="Open File", width=12, command=openfiles, bg='#c5cae9')
button3.grid(row=5, column=0, padx=10, pady=10)
button4 = Button(tab3, text="Open PDF", width=12, command=open_pdf, bg='#c5cae9')
button4.grid(row=5, column=1, padx=10, pady=10)
button5 = Button(tab3, text="SpaCy", command=use_spacy, width=12, bg='red', fg='#fff')
button5.grid(row=8, column=0, padx=10, pady=10)
button6 = Button(tab3, text="Clear Result", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')
button6.grid(row=9, column=1, padx=10, pady=10)
button7 = Button(tab3, text="NLTK", command=use_nltk, width=12, bg='#03A9F4', fg='#fff')
button7.grid(row=8, column=1, padx=10, pady=10)
button8 = Button(tab3, text="Gensim", command=use_gensim, width=12, bg='#03A9F4', fg='#fff')
button8.grid(row=9, column=0, padx=10, pady=10)
# Display Screen For Result
url_display = ScrolledText(tab3, height=10)
url_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab3_display_text = ScrolledText(tab3, height=10)
tab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
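# ABSTRACTIVE TAB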
l1 = Label(tab2, text="Enter URL To Summarize")
l1.grid(row=1, column=0)
raw_entry1 = StringVar()
url_entry1 = Entry(tab2, textvariable=raw_entry1, width=50)
url_entry1.grid(row=1, column=1)
# BUTTONS
button9 = Button(tab2, text="Reset", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')
button9.grid(row=4, column=0, padx=10, pady=10)
button10 = Button(tab2, text="Get Text", command=get_text, width=12, bg='#03A9F4', fg='#fff')
button10.grid(row=4, column=1, padx=10, pady=10)
button11 = Button(tab2, text="Open File", width=12, command=openfiles, bg='#c5cae9')
button11.grid(row=5, column=0, padx=10, pady=10)
button12 = Button(tab2, text="Open PDF", width=12, command=open_pdf1, bg='#c5cae9')
button12.grid(row=5, column=1, padx=10, pady=10)
button13 = Button(tab2, text="Clear Result", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')
button13.grid(row=9, column=1, padx=10, pady=10)
button14 = Button(tab2, text="Abstract", command=get_summary, width=12, bg='#03A9F4', fg='#fff')
button14.grid(row=9, column=0, padx=10, pady=10)
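# Display Screen For Result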
url_display1 = ScrolledText(tab2, height=10)
url_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)
tab2_display_text = ScrolledText(tab2, height=10)
tab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)
window.mainloop()
|
flexible
|
{
"blob_id": "e3dece36ba3e5b3df763e7119c485f6ed2155098",
"index": 795,
"step-1": "<mask token>\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\n<mask token>\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\n<mask token>\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = '\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\ndef clear_display_result():\n tab3_display_text.delete('1.0', END)\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\n<mask token>\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = '\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\n<mask token>\n",
"step-3": "<mask token>\nwindow = Tk()\nwindow.title('Summaryzer GUI')\nwindow.geometry('700x400')\nwindow.config(background='black')\nstyle = ttk.Style(window)\nstyle.configure('lefttab.TNotebook', tabposition='wn')\ntab_control = ttk.Notebook(window, style='lefttab.TNotebook')\ntab2 = ttk.Frame(tab_control)\ntab3 = ttk.Frame(tab_control)\ntab_control.add(tab3, text=f\"{'Extractive':^20s}\")\ntab_control.add(tab2, text=f\"{'Abstractive':^20s}\")\nlabel1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)\nlabel1.grid(column=1, row=0)\nlabel2 = Label(tab2, text='Abstractive Summrize', padx=5, pady=5)\nlabel2.grid(column=0, row=0)\ntab_control.pack(expand=1, fill='both')\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\ndef clear_display_result():\n tab3_display_text.delete('1.0', END)\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\ndef openfiles():\n file1 = tkinter.filedialog.askopenfilename(filetypes=(('Text Files',\n '.txt'), ('All files', '*')))\n read_text = open(file1).read()\n url_display.insert(tk.END, read_text)\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = '\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy 
Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_gensim():\n raw_text = url_display.get('1.0', tk.END)\n final_text = summarize(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = summarize(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nGensim Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\nl1 = Label(tab3, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry = StringVar()\nurl_entry = Entry(tab3, textvariable=raw_entry, width=50)\nurl_entry.grid(row=1, column=1)\nbutton1 = Button(tab3, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton1.grid(row=4, column=0, padx=10, pady=10)\nbutton2 = Button(tab3, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton2.grid(row=4, column=1, padx=10, pady=10)\nbutton3 = Button(tab3, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton3.grid(row=5, column=0, padx=10, pady=10)\nbutton4 = Button(tab3, text='Open PDF', width=12, command=open_pdf, bg=\n '#c5cae9')\nbutton4.grid(row=5, column=1, padx=10, pady=10)\nbutton5 = Button(tab3, text='SpaCy', command=use_spacy, width=12, bg='red',\n fg='#fff')\nbutton5.grid(row=8, column=0, padx=10, pady=10)\nbutton6 = Button(tab3, text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton6.grid(row=9, column=1, padx=10, pady=10)\nbutton7 = Button(tab3, text='NLTK', command=use_nltk, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton7.grid(row=8, column=1, padx=10, pady=10)\nbutton8 = Button(tab3, text='Gensim', command=use_gensim, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton8.grid(row=9, column=0, padx=10, pady=10)\nurl_display = ScrolledText(tab3, height=10)\nurl_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab3_display_text = ScrolledText(tab3, height=10)\ntab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nl1 = Label(tab2, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry1 = StringVar()\nurl_entry1 = Entry(tab2, textvariable=raw_entry, width=50)\nurl_entry1.grid(row=1, column=1)\nbutton9 = Button(tab2, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton9.grid(row=4, column=0, padx=10, pady=10)\nbutton10 = Button(tab2, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton10.grid(row=4, column=1, padx=10, pady=10)\nbutton11 = Button(tab2, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton11.grid(row=5, column=0, padx=10, pady=10)\nbutton12 = Button(tab2, text='Open PDF', width=12, command=open_pdf1, bg=\n '#c5cae9')\nbutton12.grid(row=5, column=1, padx=10, pady=10)\nbutton13 = Button(tab2, 
text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton13.grid(row=9, column=1, padx=10, pady=10)\nbutton14 = Button(tab2, text='Abstract', command=get_summary, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton14.grid(row=9, column=0, padx=10, pady=10)\nurl_display1 = ScrolledText(tab2, height=10)\nurl_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab2_display_text = ScrolledText(tab2, height=10)\ntab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nwindow.mainloop()\n",
"step-4": "import difflib\nimport tkinter as tk\nfrom tkinter import *\nfrom tkinter import ttk\nfrom tkinter.scrolledtext import *\nimport tkinter.filedialog\nimport PyPDF2\nfrom tkinter import filedialog\nimport torch\nimport json\nfrom transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config\nfrom spacy_summarization import text_summarizer\nfrom gensim.summarization import summarize\nfrom nltk_summarization import nltk_summarizer\nfrom bs4 import BeautifulSoup\nfrom urllib.request import urlopen\nwindow = Tk()\nwindow.title('Summaryzer GUI')\nwindow.geometry('700x400')\nwindow.config(background='black')\nstyle = ttk.Style(window)\nstyle.configure('lefttab.TNotebook', tabposition='wn')\ntab_control = ttk.Notebook(window, style='lefttab.TNotebook')\ntab2 = ttk.Frame(tab_control)\ntab3 = ttk.Frame(tab_control)\ntab_control.add(tab3, text=f\"{'Extractive':^20s}\")\ntab_control.add(tab2, text=f\"{'Abstractive':^20s}\")\nlabel1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)\nlabel1.grid(column=1, row=0)\nlabel2 = Label(tab2, text='Abstractive Summrize', padx=5, pady=5)\nlabel2.grid(column=0, row=0)\ntab_control.pack(expand=1, fill='both')\n\n\ndef get_summary():\n model = T5ForConditionalGeneration.from_pretrained('t5-small')\n tokenizer = T5Tokenizer.from_pretrained('t5-small')\n device = torch.device('cpu')\n text = str(url_display1.get('1.0', tk.END))\n preprocess_text = text.strip().replace('\\n', '')\n t5_prepared_Text = 'summarize: ' + preprocess_text\n tokenized_text = tokenizer.encode(t5_prepared_Text, return_tensors='pt'\n ).to(device)\n summary_ids = model.generate(tokenized_text, num_beams=4,\n no_repeat_ngram_size=2, min_length=30, max_length=100,\n early_stopping=True)\n output = tokenizer.decode(summary_ids[0], skip_special_tokens=True)\n Str1 = text\n str2 = output\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n edited = len(text) - len(output)\n Precision = (len(text) + len(output) + edited) / 2\n Precisioncalc = Precision / 100\n result = ('\\n\\nSummarized text: \\n', output\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab2_display_text.insert(tk.END, result)\n\n\ndef open_pdf():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display.insert(3.0, io)\n\n\ndef open_pdf1():\n open_file = filedialog.askopenfilename(initialdir='C:/gui/', title=\n 'Open PDF File', filetypes=(('PDF Files', '*.pdf'), ('All Files', '.'))\n )\n if open_file:\n pdf_file = PyPDF2.PdfFileReader(open_file)\n page = pdf_file.getPage(0)\n page_stuff = page.extractText()\n io = page_stuff.split()\n url_display1.insert(3.0, io)\n\n\ndef clear_display_result():\n tab3_display_text.delete('1.0', END)\n\n\ndef clear_url_entry():\n url_entry.delete(0, END)\n\n\ndef openfiles():\n file1 = tkinter.filedialog.askopenfilename(filetypes=(('Text Files',\n '.txt'), ('All files', '*')))\n read_text = open(file1).read()\n url_display.insert(tk.END, read_text)\n\n\ndef get_text():\n raw_text = str(url_entry.get())\n page = urlopen(raw_text)\n soup = BeautifulSoup(page)\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\n url_display.insert(tk.END, fetched_text)\n\n\ndef get_url_summary():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n result = 
'\\nSummary:{}'.format(final_text)\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_spacy():\n raw_text = url_display.get('1.0', tk.END)\n final_text = text_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = text_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nSpacy Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_nltk():\n raw_text = url_display.get('1.0', tk.END)\n final_text = nltk_summarizer(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = nltk_summarizer(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nNLTK Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\ndef use_gensim():\n raw_text = url_display.get('1.0', tk.END)\n final_text = summarize(raw_text)\n print(final_text)\n Str1 = raw_text\n str2 = summarize(raw_text)\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\n Precisioncalc = Precision / 100\n result = '\\nGensim Summary:{}\\n'.format(final_text\n ), ' Precision = ', Precisioncalc, ' similarity = ', printt\n tab3_display_text.insert(tk.END, result)\n\n\nl1 = Label(tab3, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry = StringVar()\nurl_entry = Entry(tab3, textvariable=raw_entry, width=50)\nurl_entry.grid(row=1, column=1)\nbutton1 = Button(tab3, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton1.grid(row=4, column=0, padx=10, pady=10)\nbutton2 = Button(tab3, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton2.grid(row=4, column=1, padx=10, pady=10)\nbutton3 = Button(tab3, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton3.grid(row=5, column=0, padx=10, pady=10)\nbutton4 = Button(tab3, text='Open PDF', width=12, command=open_pdf, bg=\n '#c5cae9')\nbutton4.grid(row=5, column=1, padx=10, pady=10)\nbutton5 = Button(tab3, text='SpaCy', command=use_spacy, width=12, bg='red',\n fg='#fff')\nbutton5.grid(row=8, column=0, padx=10, pady=10)\nbutton6 = Button(tab3, text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton6.grid(row=9, column=1, padx=10, pady=10)\nbutton7 = Button(tab3, text='NLTK', command=use_nltk, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton7.grid(row=8, column=1, padx=10, pady=10)\nbutton8 = Button(tab3, text='Gensim', command=use_gensim, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton8.grid(row=9, column=0, padx=10, pady=10)\nurl_display = ScrolledText(tab3, height=10)\nurl_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab3_display_text = ScrolledText(tab3, height=10)\ntab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nl1 = Label(tab2, text='Enter URL To Summarize')\nl1.grid(row=1, column=0)\nraw_entry1 = StringVar()\nurl_entry1 = Entry(tab2, textvariable=raw_entry, width=50)\nurl_entry1.grid(row=1, column=1)\nbutton9 = Button(tab2, text='Reset', command=clear_url_entry, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton9.grid(row=4, column=0, 
padx=10, pady=10)\nbutton10 = Button(tab2, text='Get Text', command=get_text, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton10.grid(row=4, column=1, padx=10, pady=10)\nbutton11 = Button(tab2, text='Open File', width=12, command=openfiles, bg=\n '#c5cae9')\nbutton11.grid(row=5, column=0, padx=10, pady=10)\nbutton12 = Button(tab2, text='Open PDF', width=12, command=open_pdf1, bg=\n '#c5cae9')\nbutton12.grid(row=5, column=1, padx=10, pady=10)\nbutton13 = Button(tab2, text='Clear Result', command=clear_display_result,\n width=12, bg='#03A9F4', fg='#fff')\nbutton13.grid(row=9, column=1, padx=10, pady=10)\nbutton14 = Button(tab2, text='Abstract', command=get_summary, width=12, bg=\n '#03A9F4', fg='#fff')\nbutton14.grid(row=9, column=0, padx=10, pady=10)\nurl_display1 = ScrolledText(tab2, height=10)\nurl_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\ntab2_display_text = ScrolledText(tab2, height=10)\ntab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\nwindow.mainloop()\n",
"step-5": "# Core Packages\r\nimport difflib\r\nimport tkinter as tk\r\nfrom tkinter import *\r\nfrom tkinter import ttk\r\nfrom tkinter.scrolledtext import *\r\nimport tkinter.filedialog\r\nimport PyPDF2\r\nfrom tkinter import filedialog\r\nimport torch\r\nimport json\r\nfrom transformers import T5Tokenizer, T5ForConditionalGeneration, T5Config\r\n\r\n# NLP Pkgs\r\nfrom spacy_summarization import text_summarizer\r\nfrom gensim.summarization import summarize\r\nfrom nltk_summarization import nltk_summarizer\r\n\r\n# Web Scraping Pkg\r\nfrom bs4 import BeautifulSoup\r\nfrom urllib.request import urlopen\r\n\r\n# Structure and Layout\r\nwindow = Tk()\r\nwindow.title(\"Summaryzer GUI\")\r\nwindow.geometry(\"700x400\")\r\nwindow.config(background='black')\r\n\r\nstyle = ttk.Style(window)\r\nstyle.configure('lefttab.TNotebook', tabposition='wn', )\r\n\r\n# TAB LAYOUT\r\ntab_control = ttk.Notebook(window, style='lefttab.TNotebook')\r\n\r\ntab2 = ttk.Frame(tab_control)\r\ntab3 = ttk.Frame(tab_control)\r\n\r\n# ADD TABS TO NOTEBOOK\r\ntab_control.add(tab3, text=f'{\"Extractive\":^20s}')\r\ntab_control.add(tab2, text=f'{\"Abstractive\":^20s}')\r\n\r\nlabel1 = Label(tab3, text='Extractive Summrize', padx=5, pady=5)\r\nlabel1.grid(column=1, row=0)\r\n\r\n\r\nlabel2 = Label(tab2, text='Abstractive Summrize',padx=5, pady=5)\r\nlabel2.grid(column=0, row=0)\r\n\r\ntab_control.pack(expand=1, fill='both')\r\n\r\ndef get_summary():\r\n model = T5ForConditionalGeneration.from_pretrained ('t5-small')\r\n tokenizer = T5Tokenizer.from_pretrained ('t5-small')\r\n device = torch.device ('cpu')\r\n text = str(url_display1.get('1.0', tk.END))\r\n preprocess_text = text.strip ().replace (\"\\n\", \"\")\r\n t5_prepared_Text = \"summarize: \" + preprocess_text\r\n tokenized_text = tokenizer.encode (t5_prepared_Text, return_tensors=\"pt\").to (device)\r\n\r\n summary_ids = model.generate (tokenized_text,\r\n num_beams=4,\r\n no_repeat_ngram_size=2,\r\n min_length=30,\r\n max_length=100,\r\n early_stopping=True)\r\n\r\n output = tokenizer.decode (summary_ids[0], skip_special_tokens=True)\r\n\r\n Str1 = text\r\n str2 = output\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n edited = len(text)-len(output)\r\n Precision = (len(text)+len(output)+edited)/2\r\n Precisioncalc = Precision / 100\r\n\r\n result =(\"\\n\\nSummarized text: \\n\", output),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n\r\n tab2_display_text.insert(tk.END, result)\r\n\r\ndef open_pdf():\r\n open_file = filedialog.askopenfilename(\r\n initialdir=\"C:/gui/\",\r\n title=\"Open PDF File\",\r\n filetypes=(\r\n (\"PDF Files\", \"*.pdf\"),\r\n (\"All Files\", \".\")))\r\n\r\n if open_file:\r\n pdf_file = PyPDF2.PdfFileReader(open_file)\r\n page = pdf_file.getPage(0)\r\n page_stuff = page.extractText()\r\n io = page_stuff.split()\r\n url_display.insert(3.0, io)\r\n\r\n\r\ndef open_pdf1():\r\n open_file = filedialog.askopenfilename(\r\n initialdir=\"C:/gui/\",\r\n title=\"Open PDF File\",\r\n filetypes=(\r\n (\"PDF Files\", \"*.pdf\"),\r\n (\"All Files\", \".\")))\r\n\r\n if open_file:\r\n pdf_file = PyPDF2.PdfFileReader(open_file)\r\n page = pdf_file.getPage(0)\r\n page_stuff = page.extractText()\r\n io = page_stuff.split()\r\n url_display1.insert(3.0, io)\r\n\r\n\r\ndef clear_display_result():\r\n tab3_display_text.delete('1.0', END)\r\n\r\n# Clear For URL\r\ndef clear_url_entry():\r\n url_entry.delete(0, END)\r\n\r\n\r\n# Open File to Read and Process\r\ndef openfiles():\r\n file1 = 
tkinter.filedialog.askopenfilename(filetypes=((\"Text Files\", \".txt\"), (\"All files\", \"*\")))\r\n read_text = open(file1).read()\r\n url_display.insert(tk.END, read_text)\r\n\r\n\r\ndef get_text():\r\n raw_text = str(url_entry.get())\r\n page = urlopen(raw_text)\r\n soup = BeautifulSoup(page)\r\n fetched_text = ' '.join(map(lambda p: p.text, soup.find_all('p')))\r\n url_display.insert(tk.END, fetched_text)\r\n\r\n\r\ndef get_url_summary():\r\n raw_text = url_display.get('1.0', tk.END)\r\n final_text = text_summarizer(raw_text)\r\n result = '\\nSummary:{}'.format(final_text)\r\n tab3_display_text.insert(tk.END, result)\r\n\r\n\r\ndef use_spacy ():\r\n\r\n raw_text = url_display.get('1.0', tk.END)\r\n final_text = text_summarizer(raw_text)\r\n print(final_text)\r\n\r\n Str1 = raw_text\r\n str2 = text_summarizer(raw_text)\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\r\n Precisioncalc = Precision / 100\r\n result = '\\nSpacy Summary:{}\\n'.format(final_text),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n tab3_display_text.insert(tk.END, result)\r\n\r\n\r\ndef use_nltk():\r\n raw_text = url_display.get ('1.0', tk.END)\r\n final_text = nltk_summarizer (raw_text)\r\n print (final_text)\r\n\r\n Str1 = raw_text\r\n str2 = nltk_summarizer(raw_text)\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\r\n Precisioncalc = Precision / 100\r\n result = '\\nNLTK Summary:{}\\n'.format(final_text),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n tab3_display_text.insert(tk.END, result)\r\n\r\ndef use_gensim():\r\n raw_text = url_display.get ('1.0', tk.END)\r\n final_text = summarize(raw_text)\r\n print (final_text)\r\n Str1 = raw_text\r\n str2 = summarize(raw_text)\r\n printt = difflib.SequenceMatcher(None, Str1, str2, False).ratio() * 100\r\n\r\n Precision = len(raw_text) + len(nltk_summarizer(raw_text)) / len(raw_text)\r\n Precisioncalc = Precision / 100\r\n result ='\\nGensim Summary:{}\\n'.format(final_text),\" Precision = \" , Precisioncalc , \" similarity = \" , printt\r\n tab3_display_text.insert(tk.END, result)\r\n\r\n\r\n# URL TAB\r\nl1 = Label(tab3, text=\"Enter URL To Summarize\")\r\nl1.grid(row=1, column=0)\r\n\r\nraw_entry = StringVar()\r\nurl_entry = Entry(tab3, textvariable=raw_entry, width=50)\r\nurl_entry.grid(row=1, column=1)\r\n\r\n# BUTTONS\r\nbutton1 = Button(tab3, text=\"Reset\", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')\r\nbutton1.grid(row=4, column=0, padx=10, pady=10)\r\n\r\nbutton2 = Button(tab3, text=\"Get Text\", command=get_text, width=12, bg='#03A9F4', fg='#fff')\r\nbutton2.grid(row=4, column=1, padx=10, pady=10)\r\n\r\nbutton3 = Button(tab3, text=\"Open File\", width=12, command=openfiles, bg='#c5cae9')\r\nbutton3.grid(row=5, column=0, padx=10, pady=10)\r\n\r\nbutton4 = Button(tab3, text=\"Open PDF\", width=12, command=open_pdf, bg='#c5cae9')\r\nbutton4.grid(row=5, column=1, padx=10, pady=10)\r\n\r\nbutton5 = Button(tab3, text=\"SpaCy\", command=use_spacy, width=12, bg='red', fg='#fff')\r\nbutton5.grid(row=8, column=0, padx=10, pady=10)\r\n\r\nbutton6 = Button(tab3, text=\"Clear Result\", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')\r\nbutton6.grid(row=9, column=1, padx=10, pady=10)\r\n\r\nbutton7 = Button(tab3, text=\"NLTK\", command=use_nltk, width=12, bg='#03A9F4', 
fg='#fff')\r\nbutton7.grid(row=8, column=1, padx=10, pady=10)\r\n\r\nbutton8 = Button(tab3, text=\"Gensim\", command=use_gensim, width=12, bg='#03A9F4', fg='#fff')\r\nbutton8.grid(row=9, column=0, padx=10, pady=10)\r\n# Display Screen For Result\r\nurl_display = ScrolledText(tab3, height=10)\r\nurl_display.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\r\n\r\ntab3_display_text = ScrolledText(tab3, height=10)\r\ntab3_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\r\n\r\n\r\n\r\nl1 = Label(tab2, text=\"Enter URL To Summarize\")\r\nl1.grid(row=1, column=0)\r\n\r\nraw_entry1 = StringVar()\r\nurl_entry1 = Entry(tab2, textvariable=raw_entry, width=50)\r\nurl_entry1.grid(row=1, column=1)\r\n\r\n# BUTTONS\r\n\r\nbutton9 = Button(tab2, text=\"Reset\", command=clear_url_entry, width=12, bg='#03A9F4', fg='#fff')\r\nbutton9.grid(row=4, column=0, padx=10, pady=10)\r\n\r\nbutton10 = Button(tab2, text=\"Get Text\", command=get_text, width=12, bg='#03A9F4', fg='#fff')\r\nbutton10.grid(row=4, column=1, padx=10, pady=10)\r\n\r\nbutton11 = Button(tab2, text=\"Open File\", width=12, command=openfiles, bg='#c5cae9')\r\nbutton11.grid(row=5, column=0, padx=10, pady=10)\r\n\r\nbutton12 = Button(tab2, text=\"Open PDF\", width=12, command=open_pdf1, bg='#c5cae9')\r\nbutton12.grid(row=5, column=1, padx=10, pady=10)\r\n\r\nbutton13 = Button(tab2, text=\"Clear Result\", command=clear_display_result, width=12, bg='#03A9F4', fg='#fff')\r\nbutton13.grid(row=9, column=1, padx=10, pady=10)\r\n\r\nbutton14 = Button(tab2, text=\"Abstract\", command=get_summary, width=12, bg='#03A9F4', fg='#fff')\r\nbutton14.grid(row=9, column=0, padx=10, pady=10)\r\n\r\nurl_display1 = ScrolledText(tab2, height=10)\r\nurl_display1.grid(row=7, column=0, columnspan=3, padx=5, pady=5)\r\n\r\ntab2_display_text = ScrolledText(tab2, height=10)\r\ntab2_display_text.grid(row=11, column=0, columnspan=3, padx=5, pady=5)\r\n\r\nwindow.mainloop()\r\n\r\n",
"step-ids": [
8,
9,
13,
14,
15
]
}
|
[
8,
9,
13,
14,
15
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append('../../../../libs/VASNet/')
<|reserved_special_token_0|>
sys.path.append('../../../config')
<|reserved_special_token_0|>
if __name__ == '__main__':
path_pretrained_model = cfg.PATH_DRDSN_PRETRAINED_MODEL
path_feature = cfg.PATH_FEATURE_GOOGLENET
from os import walk
f = []
for dirpath, dirnames, filenames in walk(path_feature):
f.extend(filenames)
break
for i in f:
features = np.load(os.path.join(path_feature, i))
score = get_VASNet_score(features, path_pretrained_model=
path_pretrained_model)
sys.exit(0)
<|reserved_special_token_1|>
import os, sys, glob
sys.path.append('../../../../libs/VASNet/')
from VASNet_frame_scoring_lib import *
sys.path.append('../../../config')
from config import *
if __name__ == '__main__':
path_pretrained_model = cfg.PATH_DRDSN_PRETRAINED_MODEL
path_feature = cfg.PATH_FEATURE_GOOGLENET
from os import walk
f = []
for dirpath, dirnames, filenames in walk(path_feature):
f.extend(filenames)
break
for i in f:
features = np.load(os.path.join(path_feature, i))
score = get_VASNet_score(features, path_pretrained_model=
path_pretrained_model)
sys.exit(0)
<|reserved_special_token_1|>
import os,sys,glob
sys.path.append("../../../../libs/VASNet/")
from VASNet_frame_scoring_lib import *
sys.path.append("../../../config")
from config import *
if __name__ == '__main__':
#************************************************************************
# Purpose: frame scoring (Summarizing Videos with Attention)
# Inputs:
    # - path_pretrained_model: path to the pretrained model
    # - path_feature: path to the video's extracted features ('.npy' with shape: x,1024 (GoogLeNet))
# Output: Score
# Author: Trivl
#************************************************************************
path_pretrained_model = cfg.PATH_DRDSN_PRETRAINED_MODEL
path_feature = cfg.PATH_FEATURE_GOOGLENET
from os import walk
f = []
for (dirpath, dirnames, filenames) in walk(path_feature):
f.extend(filenames)
break
for i in f:
features = np.load(os.path.join(path_feature,i))
score = get_VASNet_score(features,path_pretrained_model=path_pretrained_model)
sys.exit(0)
|
flexible
|
{
"blob_id": "ce97da4aab2b9de40267730168690475c899526d",
"index": 3924,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nsys.path.append('../../../../libs/VASNet/')\n<mask token>\nsys.path.append('../../../config')\n<mask token>\nif __name__ == '__main__':\n path_pretrained_model = cfg.PATH_DRDSN_PRETRAINED_MODEL\n path_feature = cfg.PATH_FEATURE_GOOGLENET\n from os import walk\n f = []\n for dirpath, dirnames, filenames in walk(path_feature):\n f.extend(filenames)\n break\n for i in f:\n features = np.load(os.path.join(path_feature, i))\n score = get_VASNet_score(features, path_pretrained_model=\n path_pretrained_model)\n sys.exit(0)\n",
"step-3": "import os, sys, glob\nsys.path.append('../../../../libs/VASNet/')\nfrom VASNet_frame_scoring_lib import *\nsys.path.append('../../../config')\nfrom config import *\nif __name__ == '__main__':\n path_pretrained_model = cfg.PATH_DRDSN_PRETRAINED_MODEL\n path_feature = cfg.PATH_FEATURE_GOOGLENET\n from os import walk\n f = []\n for dirpath, dirnames, filenames in walk(path_feature):\n f.extend(filenames)\n break\n for i in f:\n features = np.load(os.path.join(path_feature, i))\n score = get_VASNet_score(features, path_pretrained_model=\n path_pretrained_model)\n sys.exit(0)\n",
"step-4": "import os,sys,glob\nsys.path.append(\"../../../../libs/VASNet/\")\nfrom VASNet_frame_scoring_lib import *\nsys.path.append(\"../../../config\")\nfrom config import *\n\n\nif __name__ == '__main__':\n #************************************************************************\n # Purpose: frame scoring (Summarizing Videos with Attention)\n # Inputs:\n # - path_pretrained_model: path pretrained model\n # - path_feature: path feature extraction of video(' .npy' with shape: x,1024 (GoogLeNet)) \n # Output: Score\n # Author: Trivl\n #************************************************************************\n\n path_pretrained_model = cfg.PATH_DRDSN_PRETRAINED_MODEL\n path_feature = cfg.PATH_FEATURE_GOOGLENET\n from os import walk\n f = []\n for (dirpath, dirnames, filenames) in walk(path_feature):\n f.extend(filenames)\n break\n for i in f:\n features = np.load(os.path.join(path_feature,i))\n score = get_VASNet_score(features,path_pretrained_model=path_pretrained_model)\n sys.exit(0)\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import requests
from google.cloud import datastore
import google.cloud.logging
###Helper functions
def report_error(error_text):
"""Logs error to Stackdriver.
:param error_text: The text to log to Stackdriver
:type error_text: string
"""
client = google.cloud.logging.Client()
logger = client.logger("automated_error_catch")
logger.log_text(error_text)
def get_secrets():
"""Fetches secrets from Datastore and returns them as a list.
"""
client = datastore.Client()
query = client.query(kind='env_vars')
entity = query.fetch()
secrets = list(entity)[0]
return secrets
def format_requisites(text, requisites):
"""If any item requisites specified, adds them to response text data for more holistic response.
:param text: The response text data to be formatted
:type text: string
:param requisites: Contains information food item must comply with (traits, allergens, etc)
:type requisites: dict
"""
traits_text = ''
allergens_text = ''
req_map = {'trait': {'mhealthy': 'healthy'},
'allergens': {'sesame-seed': 'sesame seeds',
'tree-nuts': 'tree nuts',
'wheat_barley_rye': 'wheat or barley or rye'}}
#If traits specified, extract into a string
for i, trait in enumerate(requisites['trait']):
if traits_text:
traits_text += ', '
traits_text += req_map['trait'].get(trait, trait)
traits_text = format_plural(traits_text.rstrip(', '))
#If allergens specified, extract into a string
for i, allergen in enumerate(requisites['allergens']):
if allergens_text:
allergens_text += ', '
allergens_text += req_map['allergens'].get(allergen, allergen)
allergens_text = format_plural(allergens_text.rstrip(', '))
allergens_text = allergens_text.replace('and', 'or')
#Requisite-specific language
if allergens_text:
allergens_text = ' without ' + allergens_text
if traits_text:
traits_text = ' that is ' + traits_text
#Return combined string
if (allergens_text or traits_text) and 'Sorry, that is not available' in text:
traits_text = traits_text.replace(' that is ', '')
text = text.replace('Sorry, ', 'Sorry, ' + traits_text + ' ')
text = text.replace('that is not available', '[meal]')
return text + allergens_text + ' is not available'
else:
return text + traits_text + allergens_text
def format_plural(text):
"""Adds 'and' before last item in list of items.
:param text: The string to be manipulated
:type text: string
"""
if ',' in text:
index = text.rfind(',') + 2
text = text[:index] + 'and ' + text[index:]
return text
def remove_spaces(url_block):
"""Removes spaces in url string to create valid url string.
:param url_block: The url string to be manipulated
    :type url_block: string
"""
temp = ""
for i in range(len(url_block)):
if url_block[i] == ' ':
temp += '+'
else:
temp += url_block[i]
return temp
def check_meal_available(data, meal):
"""Searches response data to check if meal is available at specified location/date.
:param data: MDining API HTTP response data
:type data: dict
:param meal: Name of meal
:type meal: string
"""
for key in data['menu']['meal']:
if data['menu']['meal']['name'].upper() == meal.upper():
if 'course' in data['menu']['meal']:
return True
return False
return False
def check_course_available(data, course):
"""Searches response data to check if course is available in specified meal.
:param data: MDining API HTTP response data
:type data: dict
:param course: Name of course
:type course: string
"""
for i in range(len(data['menu']['meal']['course'])):
for key, value in data['menu']['meal']['course'][i].items():
if key == 'name':
if value.upper() == course.upper():
return True
return False
def check_item_specifications(item, traits, allergens):
"""Returns true if food item is satisfactory with specified traits and allergens.
:param item: Data of specific food item
:type item: dict
:param traits: List of specified traits item must have, can be empty
:type traits: list
:param allergens: List of allergens item cannot have, can be empty
:type allergens: list
"""
#Return false if allergens list isn't empty and any allergens found
if allergens and 'allergens' in item:
for allergen in allergens:
if allergen in item['allergens']:
return False
#Return true if traits list empty
if not traits:
return True
#Return false if traits list isn't empty and any traits are missing
if 'trait' in item:
for trait in traits:
if trait not in item['trait']:
return False
#All traits found, return true
return True
else:
return False
def get_items(data, requisites, formatted):
"""Returns string of food items of each course in response data for
fulfillmentText in response to Dialogflow.
:param data: MDining API HTTP response data
:type data: dict
:param requisites: Contains information food item must comply with (traits, allergens, etc)
:type requisites: dict
:param formatted: True/False - formats response string if true
:type formatted: boolean
"""
returndata = ""
traits = requisites['trait']
allergens = requisites['allergens']
if formatted:
prefix = '\t'
suffix = '\n'
else:
prefix = ''
suffix = ', '
for course in data['menu']['meal']['course']:
item_data = []
datatype = type(course['menuitem'])
if datatype is list:
item_data += course['menuitem']
else:
item_data.append(course['menuitem'])
for item in item_data:
if check_item_specifications(item, traits, allergens) and 'No Service at this Time' not in item['name']:
returndata += (prefix + (item['name']).rstrip(', ') + suffix)
return returndata
def find_item_formatting(possible_matches):
"""Formatting list of possible matches into more natural sentence structure
by removing redundancy:
[Chicken during lunch, chicken wings during lunch, and chicken patty during dinner] ->
[Chicken, chicken wings during lunch, and chicken patty during dinner]
:param possible_matches: List of food items in data that matched user input
:type possible_matches: list
"""
for i in range(len(possible_matches)):
if i == 0:
continue
words = possible_matches[i].split()
#If previous term has same ending ("Dinner") as current term, remove it
if possible_matches[i].split()[-1] == possible_matches[i - 1].split()[-1]:
#8 = amount of characters taken up by [' during ']
length = len(possible_matches[i].split()[-1]) + 8
possible_matches[i - 1] = possible_matches[i - 1][:length*-1]
return possible_matches
def find_matches(course_data, possible_matches, item_in, meal_name, requisites):
"""Appends matches of specified food item in data of an individual course to
list of possible matches.
:param course_data: Chosen course subsection of MDining API HTTP response data
:type course_data: dict
:param possible_matches: List of food items in data that matched user input
:type possible_matches: list
:param item_in: User input food item
:type item_in: string
:param meal_name: Name of meal
:type meal_name: string
:param requisites: Contains information food item must comply with (traits, allergens, etc)
:type requisites: dict
"""
traits = requisites['trait']
allergens = requisites['allergens']
item_data = []
datatype = type(course_data)
if datatype is list:
item_data += course_data
else:
item_data.append(course_data)
for item in item_data:
if check_item_specifications(item, traits, allergens) == False:
continue
if item_in.upper() in item['name'].upper():
if item['name'][-1] == ' ':
item['name'] = item['name'][:-1]
possible_matches.append(item['name'] + ' during ' + meal_name)
return possible_matches
#########################################################################
###Primary Handler Functions
def request_location_and_meal(date_in, loc_in, meal_in, requisites):
"""Handles searching for appropriate data response for valid specified
location and meal entities from ``findLocationAndMeal`` intent.
:param date_in: Input date
:type date_in: string
:param loc_in: Input location
:type loc_in: string
:param meal_in: Input meal
:type meal_in: string
:param requisites: Contains information food item must comply with (traits, allergens, etc)
:type requisites: dict
"""
#preset vars
url = 'http://api.studentlife.umich.edu/menu/xml2print.php?controller=&view=json'
location = '&location='
date = '&date='
meal = '&meal='
#API url concatenation
location += loc_in
meal += meal_in
date += str(date_in)
url = url + location + date + meal
url = remove_spaces(url)
#fetching json
data = requests.get(url).json()
#checking if specified meal available
if check_meal_available(data, meal_in):
returnstring = (get_items(data, requisites, False)).rstrip(', ')
return format_plural(returnstring)
else:
return "No meal is available"
#Handle meal item data request
def request_item(date_in, loc_in, item_in, meal_in, requisites):
"""Handles searching for appropriate data response for valid specified
location and food item entities (and meal entity if included) from ``findItem`` intent.
:param date_in: Input date
:type date_in: string
:param loc_in: Input location
:type loc_in: string
:param item_in: Input food item
:type item_in: string
:param meal_in: Input meal, can be empty string if not specified
:type meal_in: string
:param requisites: Contains information food item must comply with (traits, allergens, etc)
:type requisites: dict
"""
secrets = get_secrets()
url = secrets.get('m_dining_api_main')
location = '&location='
date = '&date='
meal = '&meal='
#API url concatenation
location += loc_in
date += str(date_in)
url = url + location + date + meal
url = remove_spaces(url)
if meal_in == '':
meal_entered = False
else:
meal_entered = True
#fetching json
data = requests.get(url).json()
possible_matches = []
#Loop through meals
for i in data['menu']['meal']:
#If meal specified, only check specified meal
if meal_entered and i['name'].upper() != meal_in.upper():
continue
#Skip meal if no food items available
if 'course' not in i:
continue
#Loop through food items in course
for j in i['course']:
for key, value in j.items():
if key == 'name':
course_data = j['menuitem']
meal_name = i['name']
                    #Append matches of the specified item to possible_matches list
possible_matches = find_matches(course_data, possible_matches,
item_in, meal_name, requisites)
#Specified item found
if possible_matches:
possible_matches = find_item_formatting(possible_matches)
text = 'Yes, there is '
for i in range(len(possible_matches)):
if len(possible_matches) > 1 and (i == len(possible_matches) - 1):
text += ' and'
text += ' ' + possible_matches[i]
if i != len(possible_matches) - 1:
text += ','
#Specified item not found
else:
text = 'Sorry, that is not available'
return {'fulfillmentText': text}
|
normal
|
{
"blob_id": "bf2b3b74f772026328cdd04412455ee758c43d3f",
"index": 8142,
"step-1": "<mask token>\n\n\ndef report_error(error_text):\n \"\"\"Logs error to Stackdriver.\n :param error_text: The text to log to Stackdriver\n :type error_text: string\n \"\"\"\n client = google.cloud.logging.Client()\n logger = client.logger('automated_error_catch')\n logger.log_text(error_text)\n\n\n<mask token>\n\n\ndef format_requisites(text, requisites):\n \"\"\"If any item requisites specified, adds them to response text data for more holistic response.\n\n :param text: The response text data to be formatted\n :type text: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits_text = ''\n allergens_text = ''\n req_map = {'trait': {'mhealthy': 'healthy'}, 'allergens': {\n 'sesame-seed': 'sesame seeds', 'tree-nuts': 'tree nuts',\n 'wheat_barley_rye': 'wheat or barley or rye'}}\n for i, trait in enumerate(requisites['trait']):\n if traits_text:\n traits_text += ', '\n traits_text += req_map['trait'].get(trait, trait)\n traits_text = format_plural(traits_text.rstrip(', '))\n for i, allergen in enumerate(requisites['allergens']):\n if allergens_text:\n allergens_text += ', '\n allergens_text += req_map['allergens'].get(allergen, allergen)\n allergens_text = format_plural(allergens_text.rstrip(', '))\n allergens_text = allergens_text.replace('and', 'or')\n if allergens_text:\n allergens_text = ' without ' + allergens_text\n if traits_text:\n traits_text = ' that is ' + traits_text\n if (allergens_text or traits_text\n ) and 'Sorry, that is not available' in text:\n traits_text = traits_text.replace(' that is ', '')\n text = text.replace('Sorry, ', 'Sorry, ' + traits_text + ' ')\n text = text.replace('that is not available', '[meal]')\n return text + allergens_text + ' is not available'\n else:\n return text + traits_text + allergens_text\n\n\ndef format_plural(text):\n \"\"\"Adds 'and' before last item in list of items.\n\n :param text: The string to be manipulated\n :type text: string\n \"\"\"\n if ',' in text:\n index = text.rfind(',') + 2\n text = text[:index] + 'and ' + text[index:]\n return text\n\n\ndef remove_spaces(url_block):\n \"\"\"Removes spaces in url string to create valid url string.\n\n :param url_block: The url string to be manipulated\n :type search: string\n \"\"\"\n temp = ''\n for i in range(len(url_block)):\n if url_block[i] == ' ':\n temp += '+'\n else:\n temp += url_block[i]\n return temp\n\n\ndef check_meal_available(data, meal):\n \"\"\"Searches response data to check if meal is available at specified location/date.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param meal: Name of meal\n :type meal: string\n \"\"\"\n for key in data['menu']['meal']:\n if data['menu']['meal']['name'].upper() == meal.upper():\n if 'course' in data['menu']['meal']:\n return True\n return False\n return False\n\n\ndef check_course_available(data, course):\n \"\"\"Searches response data to check if course is available in specified meal.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param course: Name of course\n :type course: string\n \"\"\"\n for i in range(len(data['menu']['meal']['course'])):\n for key, value in data['menu']['meal']['course'][i].items():\n if key == 'name':\n if value.upper() == course.upper():\n return True\n return False\n\n\ndef check_item_specifications(item, traits, allergens):\n \"\"\"Returns true if food item is satisfactory with specified traits and allergens.\n\n :param item: Data of specific food item\n :type item: 
dict\n :param traits: List of specified traits item must have, can be empty\n :type traits: list\n :param allergens: List of allergens item cannot have, can be empty\n :type allergens: list\n \"\"\"\n if allergens and 'allergens' in item:\n for allergen in allergens:\n if allergen in item['allergens']:\n return False\n if not traits:\n return True\n if 'trait' in item:\n for trait in traits:\n if trait not in item['trait']:\n return False\n return True\n else:\n return False\n\n\ndef get_items(data, requisites, formatted):\n \"\"\"Returns string of food items of each course in response data for\n fulfillmentText in response to Dialogflow.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n :param formatted: True/False - formats response string if true\n :type formatted: boolean\n \"\"\"\n returndata = ''\n traits = requisites['trait']\n allergens = requisites['allergens']\n if formatted:\n prefix = '\\t'\n suffix = '\\n'\n else:\n prefix = ''\n suffix = ', '\n for course in data['menu']['meal']['course']:\n item_data = []\n datatype = type(course['menuitem'])\n if datatype is list:\n item_data += course['menuitem']\n else:\n item_data.append(course['menuitem'])\n for item in item_data:\n if check_item_specifications(item, traits, allergens\n ) and 'No Service at this Time' not in item['name']:\n returndata += prefix + item['name'].rstrip(', ') + suffix\n return returndata\n\n\n<mask token>\n\n\ndef find_matches(course_data, possible_matches, item_in, meal_name, requisites\n ):\n \"\"\"Appends matches of specified food item in data of an individual course to\n list of possible matches.\n\n :param course_data: Chosen course subsection of MDining API HTTP response data\n :type course_data: dict\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n :param item_in: User input food item\n :type item_in: string\n :param meal_name: Name of meal\n :type meal_name: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits = requisites['trait']\n allergens = requisites['allergens']\n item_data = []\n datatype = type(course_data)\n if datatype is list:\n item_data += course_data\n else:\n item_data.append(course_data)\n for item in item_data:\n if check_item_specifications(item, traits, allergens) == False:\n continue\n if item_in.upper() in item['name'].upper():\n if item['name'][-1] == ' ':\n item['name'] = item['name'][:-1]\n possible_matches.append(item['name'] + ' during ' + meal_name)\n return possible_matches\n\n\ndef request_location_and_meal(date_in, loc_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and meal entities from ``findLocationAndMeal`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param meal_in: Input meal\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n url = (\n 'http://api.studentlife.umich.edu/menu/xml2print.php?controller=&view=json'\n )\n location = '&location='\n date = '&date='\n meal = '&meal='\n location += loc_in\n meal += meal_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n data = requests.get(url).json()\n if 
check_meal_available(data, meal_in):\n returnstring = get_items(data, requisites, False).rstrip(', ')\n return format_plural(returnstring)\n else:\n return 'No meal is available'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef report_error(error_text):\n \"\"\"Logs error to Stackdriver.\n :param error_text: The text to log to Stackdriver\n :type error_text: string\n \"\"\"\n client = google.cloud.logging.Client()\n logger = client.logger('automated_error_catch')\n logger.log_text(error_text)\n\n\n<mask token>\n\n\ndef format_requisites(text, requisites):\n \"\"\"If any item requisites specified, adds them to response text data for more holistic response.\n\n :param text: The response text data to be formatted\n :type text: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits_text = ''\n allergens_text = ''\n req_map = {'trait': {'mhealthy': 'healthy'}, 'allergens': {\n 'sesame-seed': 'sesame seeds', 'tree-nuts': 'tree nuts',\n 'wheat_barley_rye': 'wheat or barley or rye'}}\n for i, trait in enumerate(requisites['trait']):\n if traits_text:\n traits_text += ', '\n traits_text += req_map['trait'].get(trait, trait)\n traits_text = format_plural(traits_text.rstrip(', '))\n for i, allergen in enumerate(requisites['allergens']):\n if allergens_text:\n allergens_text += ', '\n allergens_text += req_map['allergens'].get(allergen, allergen)\n allergens_text = format_plural(allergens_text.rstrip(', '))\n allergens_text = allergens_text.replace('and', 'or')\n if allergens_text:\n allergens_text = ' without ' + allergens_text\n if traits_text:\n traits_text = ' that is ' + traits_text\n if (allergens_text or traits_text\n ) and 'Sorry, that is not available' in text:\n traits_text = traits_text.replace(' that is ', '')\n text = text.replace('Sorry, ', 'Sorry, ' + traits_text + ' ')\n text = text.replace('that is not available', '[meal]')\n return text + allergens_text + ' is not available'\n else:\n return text + traits_text + allergens_text\n\n\ndef format_plural(text):\n \"\"\"Adds 'and' before last item in list of items.\n\n :param text: The string to be manipulated\n :type text: string\n \"\"\"\n if ',' in text:\n index = text.rfind(',') + 2\n text = text[:index] + 'and ' + text[index:]\n return text\n\n\ndef remove_spaces(url_block):\n \"\"\"Removes spaces in url string to create valid url string.\n\n :param url_block: The url string to be manipulated\n :type search: string\n \"\"\"\n temp = ''\n for i in range(len(url_block)):\n if url_block[i] == ' ':\n temp += '+'\n else:\n temp += url_block[i]\n return temp\n\n\ndef check_meal_available(data, meal):\n \"\"\"Searches response data to check if meal is available at specified location/date.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param meal: Name of meal\n :type meal: string\n \"\"\"\n for key in data['menu']['meal']:\n if data['menu']['meal']['name'].upper() == meal.upper():\n if 'course' in data['menu']['meal']:\n return True\n return False\n return False\n\n\ndef check_course_available(data, course):\n \"\"\"Searches response data to check if course is available in specified meal.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param course: Name of course\n :type course: string\n \"\"\"\n for i in range(len(data['menu']['meal']['course'])):\n for key, value in data['menu']['meal']['course'][i].items():\n if key == 'name':\n if value.upper() == course.upper():\n return True\n return False\n\n\ndef check_item_specifications(item, traits, allergens):\n \"\"\"Returns true if food item is satisfactory with specified traits and allergens.\n\n :param item: Data of specific food item\n :type item: 
dict\n :param traits: List of specified traits item must have, can be empty\n :type traits: list\n :param allergens: List of allergens item cannot have, can be empty\n :type allergens: list\n \"\"\"\n if allergens and 'allergens' in item:\n for allergen in allergens:\n if allergen in item['allergens']:\n return False\n if not traits:\n return True\n if 'trait' in item:\n for trait in traits:\n if trait not in item['trait']:\n return False\n return True\n else:\n return False\n\n\ndef get_items(data, requisites, formatted):\n \"\"\"Returns string of food items of each course in response data for\n fulfillmentText in response to Dialogflow.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n :param formatted: True/False - formats response string if true\n :type formatted: boolean\n \"\"\"\n returndata = ''\n traits = requisites['trait']\n allergens = requisites['allergens']\n if formatted:\n prefix = '\\t'\n suffix = '\\n'\n else:\n prefix = ''\n suffix = ', '\n for course in data['menu']['meal']['course']:\n item_data = []\n datatype = type(course['menuitem'])\n if datatype is list:\n item_data += course['menuitem']\n else:\n item_data.append(course['menuitem'])\n for item in item_data:\n if check_item_specifications(item, traits, allergens\n ) and 'No Service at this Time' not in item['name']:\n returndata += prefix + item['name'].rstrip(', ') + suffix\n return returndata\n\n\ndef find_item_formatting(possible_matches):\n \"\"\"Formatting list of possible matches into more natural sentence structure\n by removing redundancy:\n [Chicken during lunch, chicken wings during lunch, and chicken patty during dinner] ->\n [Chicken, chicken wings during lunch, and chicken patty during dinner]\n\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n \"\"\"\n for i in range(len(possible_matches)):\n if i == 0:\n continue\n words = possible_matches[i].split()\n if possible_matches[i].split()[-1] == possible_matches[i - 1].split()[\n -1]:\n length = len(possible_matches[i].split()[-1]) + 8\n possible_matches[i - 1] = possible_matches[i - 1][:length * -1]\n return possible_matches\n\n\ndef find_matches(course_data, possible_matches, item_in, meal_name, requisites\n ):\n \"\"\"Appends matches of specified food item in data of an individual course to\n list of possible matches.\n\n :param course_data: Chosen course subsection of MDining API HTTP response data\n :type course_data: dict\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n :param item_in: User input food item\n :type item_in: string\n :param meal_name: Name of meal\n :type meal_name: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits = requisites['trait']\n allergens = requisites['allergens']\n item_data = []\n datatype = type(course_data)\n if datatype is list:\n item_data += course_data\n else:\n item_data.append(course_data)\n for item in item_data:\n if check_item_specifications(item, traits, allergens) == False:\n continue\n if item_in.upper() in item['name'].upper():\n if item['name'][-1] == ' ':\n item['name'] = item['name'][:-1]\n possible_matches.append(item['name'] + ' during ' + meal_name)\n return possible_matches\n\n\ndef request_location_and_meal(date_in, loc_in, meal_in, requisites):\n 
\"\"\"Handles searching for appropriate data response for valid specified\n location and meal entities from ``findLocationAndMeal`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param meal_in: Input meal\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n url = (\n 'http://api.studentlife.umich.edu/menu/xml2print.php?controller=&view=json'\n )\n location = '&location='\n date = '&date='\n meal = '&meal='\n location += loc_in\n meal += meal_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n data = requests.get(url).json()\n if check_meal_available(data, meal_in):\n returnstring = get_items(data, requisites, False).rstrip(', ')\n return format_plural(returnstring)\n else:\n return 'No meal is available'\n\n\ndef request_item(date_in, loc_in, item_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and food item entities (and meal entity if included) from ``findItem`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param item_in: Input food item\n :type item_in: string\n :param meal_in: Input meal, can be empty string if not specified\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n secrets = get_secrets()\n url = secrets.get('m_dining_api_main')\n location = '&location='\n date = '&date='\n meal = '&meal='\n location += loc_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n if meal_in == '':\n meal_entered = False\n else:\n meal_entered = True\n data = requests.get(url).json()\n possible_matches = []\n for i in data['menu']['meal']:\n if meal_entered and i['name'].upper() != meal_in.upper():\n continue\n if 'course' not in i:\n continue\n for j in i['course']:\n for key, value in j.items():\n if key == 'name':\n course_data = j['menuitem']\n meal_name = i['name']\n possible_matches = find_matches(course_data,\n possible_matches, item_in, meal_name, requisites)\n if possible_matches:\n possible_matches = find_item_formatting(possible_matches)\n text = 'Yes, there is '\n for i in range(len(possible_matches)):\n if len(possible_matches) > 1 and i == len(possible_matches) - 1:\n text += ' and'\n text += ' ' + possible_matches[i]\n if i != len(possible_matches) - 1:\n text += ','\n else:\n text = 'Sorry, that is not available'\n return {'fulfillmentText': text}\n",
"step-3": "<mask token>\n\n\ndef report_error(error_text):\n \"\"\"Logs error to Stackdriver.\n :param error_text: The text to log to Stackdriver\n :type error_text: string\n \"\"\"\n client = google.cloud.logging.Client()\n logger = client.logger('automated_error_catch')\n logger.log_text(error_text)\n\n\ndef get_secrets():\n \"\"\"Fetches secrets from Datastore and returns them as a list.\n \"\"\"\n client = datastore.Client()\n query = client.query(kind='env_vars')\n entity = query.fetch()\n secrets = list(entity)[0]\n return secrets\n\n\ndef format_requisites(text, requisites):\n \"\"\"If any item requisites specified, adds them to response text data for more holistic response.\n\n :param text: The response text data to be formatted\n :type text: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits_text = ''\n allergens_text = ''\n req_map = {'trait': {'mhealthy': 'healthy'}, 'allergens': {\n 'sesame-seed': 'sesame seeds', 'tree-nuts': 'tree nuts',\n 'wheat_barley_rye': 'wheat or barley or rye'}}\n for i, trait in enumerate(requisites['trait']):\n if traits_text:\n traits_text += ', '\n traits_text += req_map['trait'].get(trait, trait)\n traits_text = format_plural(traits_text.rstrip(', '))\n for i, allergen in enumerate(requisites['allergens']):\n if allergens_text:\n allergens_text += ', '\n allergens_text += req_map['allergens'].get(allergen, allergen)\n allergens_text = format_plural(allergens_text.rstrip(', '))\n allergens_text = allergens_text.replace('and', 'or')\n if allergens_text:\n allergens_text = ' without ' + allergens_text\n if traits_text:\n traits_text = ' that is ' + traits_text\n if (allergens_text or traits_text\n ) and 'Sorry, that is not available' in text:\n traits_text = traits_text.replace(' that is ', '')\n text = text.replace('Sorry, ', 'Sorry, ' + traits_text + ' ')\n text = text.replace('that is not available', '[meal]')\n return text + allergens_text + ' is not available'\n else:\n return text + traits_text + allergens_text\n\n\ndef format_plural(text):\n \"\"\"Adds 'and' before last item in list of items.\n\n :param text: The string to be manipulated\n :type text: string\n \"\"\"\n if ',' in text:\n index = text.rfind(',') + 2\n text = text[:index] + 'and ' + text[index:]\n return text\n\n\ndef remove_spaces(url_block):\n \"\"\"Removes spaces in url string to create valid url string.\n\n :param url_block: The url string to be manipulated\n :type search: string\n \"\"\"\n temp = ''\n for i in range(len(url_block)):\n if url_block[i] == ' ':\n temp += '+'\n else:\n temp += url_block[i]\n return temp\n\n\ndef check_meal_available(data, meal):\n \"\"\"Searches response data to check if meal is available at specified location/date.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param meal: Name of meal\n :type meal: string\n \"\"\"\n for key in data['menu']['meal']:\n if data['menu']['meal']['name'].upper() == meal.upper():\n if 'course' in data['menu']['meal']:\n return True\n return False\n return False\n\n\ndef check_course_available(data, course):\n \"\"\"Searches response data to check if course is available in specified meal.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param course: Name of course\n :type course: string\n \"\"\"\n for i in range(len(data['menu']['meal']['course'])):\n for key, value in data['menu']['meal']['course'][i].items():\n if key == 'name':\n if value.upper() == course.upper():\n return True\n 
return False\n\n\ndef check_item_specifications(item, traits, allergens):\n \"\"\"Returns true if food item is satisfactory with specified traits and allergens.\n\n :param item: Data of specific food item\n :type item: dict\n :param traits: List of specified traits item must have, can be empty\n :type traits: list\n :param allergens: List of allergens item cannot have, can be empty\n :type allergens: list\n \"\"\"\n if allergens and 'allergens' in item:\n for allergen in allergens:\n if allergen in item['allergens']:\n return False\n if not traits:\n return True\n if 'trait' in item:\n for trait in traits:\n if trait not in item['trait']:\n return False\n return True\n else:\n return False\n\n\ndef get_items(data, requisites, formatted):\n \"\"\"Returns string of food items of each course in response data for\n fulfillmentText in response to Dialogflow.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n :param formatted: True/False - formats response string if true\n :type formatted: boolean\n \"\"\"\n returndata = ''\n traits = requisites['trait']\n allergens = requisites['allergens']\n if formatted:\n prefix = '\\t'\n suffix = '\\n'\n else:\n prefix = ''\n suffix = ', '\n for course in data['menu']['meal']['course']:\n item_data = []\n datatype = type(course['menuitem'])\n if datatype is list:\n item_data += course['menuitem']\n else:\n item_data.append(course['menuitem'])\n for item in item_data:\n if check_item_specifications(item, traits, allergens\n ) and 'No Service at this Time' not in item['name']:\n returndata += prefix + item['name'].rstrip(', ') + suffix\n return returndata\n\n\ndef find_item_formatting(possible_matches):\n \"\"\"Formatting list of possible matches into more natural sentence structure\n by removing redundancy:\n [Chicken during lunch, chicken wings during lunch, and chicken patty during dinner] ->\n [Chicken, chicken wings during lunch, and chicken patty during dinner]\n\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n \"\"\"\n for i in range(len(possible_matches)):\n if i == 0:\n continue\n words = possible_matches[i].split()\n if possible_matches[i].split()[-1] == possible_matches[i - 1].split()[\n -1]:\n length = len(possible_matches[i].split()[-1]) + 8\n possible_matches[i - 1] = possible_matches[i - 1][:length * -1]\n return possible_matches\n\n\ndef find_matches(course_data, possible_matches, item_in, meal_name, requisites\n ):\n \"\"\"Appends matches of specified food item in data of an individual course to\n list of possible matches.\n\n :param course_data: Chosen course subsection of MDining API HTTP response data\n :type course_data: dict\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n :param item_in: User input food item\n :type item_in: string\n :param meal_name: Name of meal\n :type meal_name: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits = requisites['trait']\n allergens = requisites['allergens']\n item_data = []\n datatype = type(course_data)\n if datatype is list:\n item_data += course_data\n else:\n item_data.append(course_data)\n for item in item_data:\n if check_item_specifications(item, traits, allergens) == False:\n continue\n if item_in.upper() in item['name'].upper():\n if 
item['name'][-1] == ' ':\n item['name'] = item['name'][:-1]\n possible_matches.append(item['name'] + ' during ' + meal_name)\n return possible_matches\n\n\ndef request_location_and_meal(date_in, loc_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and meal entities from ``findLocationAndMeal`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param meal_in: Input meal\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n url = (\n 'http://api.studentlife.umich.edu/menu/xml2print.php?controller=&view=json'\n )\n location = '&location='\n date = '&date='\n meal = '&meal='\n location += loc_in\n meal += meal_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n data = requests.get(url).json()\n if check_meal_available(data, meal_in):\n returnstring = get_items(data, requisites, False).rstrip(', ')\n return format_plural(returnstring)\n else:\n return 'No meal is available'\n\n\ndef request_item(date_in, loc_in, item_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and food item entities (and meal entity if included) from ``findItem`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param item_in: Input food item\n :type item_in: string\n :param meal_in: Input meal, can be empty string if not specified\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n secrets = get_secrets()\n url = secrets.get('m_dining_api_main')\n location = '&location='\n date = '&date='\n meal = '&meal='\n location += loc_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n if meal_in == '':\n meal_entered = False\n else:\n meal_entered = True\n data = requests.get(url).json()\n possible_matches = []\n for i in data['menu']['meal']:\n if meal_entered and i['name'].upper() != meal_in.upper():\n continue\n if 'course' not in i:\n continue\n for j in i['course']:\n for key, value in j.items():\n if key == 'name':\n course_data = j['menuitem']\n meal_name = i['name']\n possible_matches = find_matches(course_data,\n possible_matches, item_in, meal_name, requisites)\n if possible_matches:\n possible_matches = find_item_formatting(possible_matches)\n text = 'Yes, there is '\n for i in range(len(possible_matches)):\n if len(possible_matches) > 1 and i == len(possible_matches) - 1:\n text += ' and'\n text += ' ' + possible_matches[i]\n if i != len(possible_matches) - 1:\n text += ','\n else:\n text = 'Sorry, that is not available'\n return {'fulfillmentText': text}\n",
"step-4": "import requests\nfrom google.cloud import datastore\nimport google.cloud.logging\n\n\ndef report_error(error_text):\n \"\"\"Logs error to Stackdriver.\n :param error_text: The text to log to Stackdriver\n :type error_text: string\n \"\"\"\n client = google.cloud.logging.Client()\n logger = client.logger('automated_error_catch')\n logger.log_text(error_text)\n\n\ndef get_secrets():\n \"\"\"Fetches secrets from Datastore and returns them as a list.\n \"\"\"\n client = datastore.Client()\n query = client.query(kind='env_vars')\n entity = query.fetch()\n secrets = list(entity)[0]\n return secrets\n\n\ndef format_requisites(text, requisites):\n \"\"\"If any item requisites specified, adds them to response text data for more holistic response.\n\n :param text: The response text data to be formatted\n :type text: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits_text = ''\n allergens_text = ''\n req_map = {'trait': {'mhealthy': 'healthy'}, 'allergens': {\n 'sesame-seed': 'sesame seeds', 'tree-nuts': 'tree nuts',\n 'wheat_barley_rye': 'wheat or barley or rye'}}\n for i, trait in enumerate(requisites['trait']):\n if traits_text:\n traits_text += ', '\n traits_text += req_map['trait'].get(trait, trait)\n traits_text = format_plural(traits_text.rstrip(', '))\n for i, allergen in enumerate(requisites['allergens']):\n if allergens_text:\n allergens_text += ', '\n allergens_text += req_map['allergens'].get(allergen, allergen)\n allergens_text = format_plural(allergens_text.rstrip(', '))\n allergens_text = allergens_text.replace('and', 'or')\n if allergens_text:\n allergens_text = ' without ' + allergens_text\n if traits_text:\n traits_text = ' that is ' + traits_text\n if (allergens_text or traits_text\n ) and 'Sorry, that is not available' in text:\n traits_text = traits_text.replace(' that is ', '')\n text = text.replace('Sorry, ', 'Sorry, ' + traits_text + ' ')\n text = text.replace('that is not available', '[meal]')\n return text + allergens_text + ' is not available'\n else:\n return text + traits_text + allergens_text\n\n\ndef format_plural(text):\n \"\"\"Adds 'and' before last item in list of items.\n\n :param text: The string to be manipulated\n :type text: string\n \"\"\"\n if ',' in text:\n index = text.rfind(',') + 2\n text = text[:index] + 'and ' + text[index:]\n return text\n\n\ndef remove_spaces(url_block):\n \"\"\"Removes spaces in url string to create valid url string.\n\n :param url_block: The url string to be manipulated\n :type search: string\n \"\"\"\n temp = ''\n for i in range(len(url_block)):\n if url_block[i] == ' ':\n temp += '+'\n else:\n temp += url_block[i]\n return temp\n\n\ndef check_meal_available(data, meal):\n \"\"\"Searches response data to check if meal is available at specified location/date.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param meal: Name of meal\n :type meal: string\n \"\"\"\n for key in data['menu']['meal']:\n if data['menu']['meal']['name'].upper() == meal.upper():\n if 'course' in data['menu']['meal']:\n return True\n return False\n return False\n\n\ndef check_course_available(data, course):\n \"\"\"Searches response data to check if course is available in specified meal.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param course: Name of course\n :type course: string\n \"\"\"\n for i in range(len(data['menu']['meal']['course'])):\n for key, value in data['menu']['meal']['course'][i].items():\n if 
key == 'name':\n if value.upper() == course.upper():\n return True\n return False\n\n\ndef check_item_specifications(item, traits, allergens):\n \"\"\"Returns true if food item is satisfactory with specified traits and allergens.\n\n :param item: Data of specific food item\n :type item: dict\n :param traits: List of specified traits item must have, can be empty\n :type traits: list\n :param allergens: List of allergens item cannot have, can be empty\n :type allergens: list\n \"\"\"\n if allergens and 'allergens' in item:\n for allergen in allergens:\n if allergen in item['allergens']:\n return False\n if not traits:\n return True\n if 'trait' in item:\n for trait in traits:\n if trait not in item['trait']:\n return False\n return True\n else:\n return False\n\n\ndef get_items(data, requisites, formatted):\n \"\"\"Returns string of food items of each course in response data for\n fulfillmentText in response to Dialogflow.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n :param formatted: True/False - formats response string if true\n :type formatted: boolean\n \"\"\"\n returndata = ''\n traits = requisites['trait']\n allergens = requisites['allergens']\n if formatted:\n prefix = '\\t'\n suffix = '\\n'\n else:\n prefix = ''\n suffix = ', '\n for course in data['menu']['meal']['course']:\n item_data = []\n datatype = type(course['menuitem'])\n if datatype is list:\n item_data += course['menuitem']\n else:\n item_data.append(course['menuitem'])\n for item in item_data:\n if check_item_specifications(item, traits, allergens\n ) and 'No Service at this Time' not in item['name']:\n returndata += prefix + item['name'].rstrip(', ') + suffix\n return returndata\n\n\ndef find_item_formatting(possible_matches):\n \"\"\"Formatting list of possible matches into more natural sentence structure\n by removing redundancy:\n [Chicken during lunch, chicken wings during lunch, and chicken patty during dinner] ->\n [Chicken, chicken wings during lunch, and chicken patty during dinner]\n\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n \"\"\"\n for i in range(len(possible_matches)):\n if i == 0:\n continue\n words = possible_matches[i].split()\n if possible_matches[i].split()[-1] == possible_matches[i - 1].split()[\n -1]:\n length = len(possible_matches[i].split()[-1]) + 8\n possible_matches[i - 1] = possible_matches[i - 1][:length * -1]\n return possible_matches\n\n\ndef find_matches(course_data, possible_matches, item_in, meal_name, requisites\n ):\n \"\"\"Appends matches of specified food item in data of an individual course to\n list of possible matches.\n\n :param course_data: Chosen course subsection of MDining API HTTP response data\n :type course_data: dict\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n :param item_in: User input food item\n :type item_in: string\n :param meal_name: Name of meal\n :type meal_name: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits = requisites['trait']\n allergens = requisites['allergens']\n item_data = []\n datatype = type(course_data)\n if datatype is list:\n item_data += course_data\n else:\n item_data.append(course_data)\n for item in item_data:\n if check_item_specifications(item, traits, allergens) == False:\n 
continue\n if item_in.upper() in item['name'].upper():\n if item['name'][-1] == ' ':\n item['name'] = item['name'][:-1]\n possible_matches.append(item['name'] + ' during ' + meal_name)\n return possible_matches\n\n\ndef request_location_and_meal(date_in, loc_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and meal entities from ``findLocationAndMeal`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param meal_in: Input meal\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n url = (\n 'http://api.studentlife.umich.edu/menu/xml2print.php?controller=&view=json'\n )\n location = '&location='\n date = '&date='\n meal = '&meal='\n location += loc_in\n meal += meal_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n data = requests.get(url).json()\n if check_meal_available(data, meal_in):\n returnstring = get_items(data, requisites, False).rstrip(', ')\n return format_plural(returnstring)\n else:\n return 'No meal is available'\n\n\ndef request_item(date_in, loc_in, item_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and food item entities (and meal entity if included) from ``findItem`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param item_in: Input food item\n :type item_in: string\n :param meal_in: Input meal, can be empty string if not specified\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n secrets = get_secrets()\n url = secrets.get('m_dining_api_main')\n location = '&location='\n date = '&date='\n meal = '&meal='\n location += loc_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n if meal_in == '':\n meal_entered = False\n else:\n meal_entered = True\n data = requests.get(url).json()\n possible_matches = []\n for i in data['menu']['meal']:\n if meal_entered and i['name'].upper() != meal_in.upper():\n continue\n if 'course' not in i:\n continue\n for j in i['course']:\n for key, value in j.items():\n if key == 'name':\n course_data = j['menuitem']\n meal_name = i['name']\n possible_matches = find_matches(course_data,\n possible_matches, item_in, meal_name, requisites)\n if possible_matches:\n possible_matches = find_item_formatting(possible_matches)\n text = 'Yes, there is '\n for i in range(len(possible_matches)):\n if len(possible_matches) > 1 and i == len(possible_matches) - 1:\n text += ' and'\n text += ' ' + possible_matches[i]\n if i != len(possible_matches) - 1:\n text += ','\n else:\n text = 'Sorry, that is not available'\n return {'fulfillmentText': text}\n",
"step-5": "import requests\nfrom google.cloud import datastore\nimport google.cloud.logging\n\n###Helper functions\n\ndef report_error(error_text):\n \"\"\"Logs error to Stackdriver.\n :param error_text: The text to log to Stackdriver\n :type error_text: string\n \"\"\"\n client = google.cloud.logging.Client()\n logger = client.logger(\"automated_error_catch\")\n logger.log_text(error_text)\n\ndef get_secrets():\n \"\"\"Fetches secrets from Datastore and returns them as a list.\n \"\"\"\n client = datastore.Client()\n query = client.query(kind='env_vars')\n entity = query.fetch()\n secrets = list(entity)[0]\n return secrets\n\ndef format_requisites(text, requisites):\n \"\"\"If any item requisites specified, adds them to response text data for more holistic response.\n\n :param text: The response text data to be formatted\n :type text: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n traits_text = ''\n allergens_text = ''\n\n req_map = {'trait': {'mhealthy': 'healthy'},\n 'allergens': {'sesame-seed': 'sesame seeds',\n 'tree-nuts': 'tree nuts',\n 'wheat_barley_rye': 'wheat or barley or rye'}}\n\n #If traits specified, extract into a string\n for i, trait in enumerate(requisites['trait']):\n if traits_text:\n traits_text += ', '\n traits_text += req_map['trait'].get(trait, trait)\n traits_text = format_plural(traits_text.rstrip(', '))\n\n #If allergens specified, extract into a string\n for i, allergen in enumerate(requisites['allergens']):\n if allergens_text:\n allergens_text += ', '\n allergens_text += req_map['allergens'].get(allergen, allergen)\n allergens_text = format_plural(allergens_text.rstrip(', '))\n allergens_text = allergens_text.replace('and', 'or')\n\n #Requisite-specific language\n if allergens_text:\n allergens_text = ' without ' + allergens_text\n if traits_text:\n traits_text = ' that is ' + traits_text\n\n #Return combined string\n if (allergens_text or traits_text) and 'Sorry, that is not available' in text:\n traits_text = traits_text.replace(' that is ', '')\n text = text.replace('Sorry, ', 'Sorry, ' + traits_text + ' ')\n text = text.replace('that is not available', '[meal]')\n return text + allergens_text + ' is not available'\n else:\n return text + traits_text + allergens_text\n\ndef format_plural(text):\n \"\"\"Adds 'and' before last item in list of items.\n\n :param text: The string to be manipulated\n :type text: string\n \"\"\"\n if ',' in text:\n index = text.rfind(',') + 2\n text = text[:index] + 'and ' + text[index:]\n return text\n\ndef remove_spaces(url_block):\n \"\"\"Removes spaces in url string to create valid url string.\n\n :param url_block: The url string to be manipulated\n :type search: string\n \"\"\"\n temp = \"\"\n for i in range(len(url_block)):\n if url_block[i] == ' ':\n temp += '+'\n else:\n temp += url_block[i]\n return temp\n\ndef check_meal_available(data, meal):\n \"\"\"Searches response data to check if meal is available at specified location/date.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param meal: Name of meal\n :type meal: string\n \"\"\"\n for key in data['menu']['meal']:\n if data['menu']['meal']['name'].upper() == meal.upper():\n if 'course' in data['menu']['meal']:\n return True\n return False\n return False\n\ndef check_course_available(data, course):\n \"\"\"Searches response data to check if course is available in specified meal.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param 
course: Name of course\n :type course: string\n \"\"\"\n for i in range(len(data['menu']['meal']['course'])):\n for key, value in data['menu']['meal']['course'][i].items():\n if key == 'name':\n if value.upper() == course.upper():\n return True\n return False\n\n\n\ndef check_item_specifications(item, traits, allergens):\n \"\"\"Returns true if food item is satisfactory with specified traits and allergens.\n\n :param item: Data of specific food item\n :type item: dict\n :param traits: List of specified traits item must have, can be empty\n :type traits: list\n :param allergens: List of allergens item cannot have, can be empty\n :type allergens: list\n \"\"\"\n #Return false if allergens list isn't empty and any allergens found\n if allergens and 'allergens' in item:\n for allergen in allergens:\n if allergen in item['allergens']:\n return False\n\n #Return true if traits list empty\n if not traits:\n return True\n\n #Return false if traits list isn't empty and any traits are missing\n if 'trait' in item:\n for trait in traits:\n if trait not in item['trait']:\n return False\n\n #All traits found, return true\n return True\n else:\n return False\n\ndef get_items(data, requisites, formatted):\n \"\"\"Returns string of food items of each course in response data for\n fulfillmentText in response to Dialogflow.\n\n :param data: MDining API HTTP response data\n :type data: dict\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n :param formatted: True/False - formats response string if true\n :type formatted: boolean\n \"\"\"\n returndata = \"\"\n traits = requisites['trait']\n allergens = requisites['allergens']\n\n if formatted:\n prefix = '\\t'\n suffix = '\\n'\n else:\n prefix = ''\n suffix = ', '\n\n for course in data['menu']['meal']['course']:\n item_data = []\n datatype = type(course['menuitem'])\n\n if datatype is list:\n item_data += course['menuitem']\n else:\n item_data.append(course['menuitem'])\n\n for item in item_data:\n if check_item_specifications(item, traits, allergens) and 'No Service at this Time' not in item['name']:\n returndata += (prefix + (item['name']).rstrip(', ') + suffix)\n\n return returndata\n\ndef find_item_formatting(possible_matches):\n \"\"\"Formatting list of possible matches into more natural sentence structure\n by removing redundancy:\n [Chicken during lunch, chicken wings during lunch, and chicken patty during dinner] ->\n [Chicken, chicken wings during lunch, and chicken patty during dinner]\n\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n \"\"\"\n for i in range(len(possible_matches)):\n if i == 0:\n continue\n words = possible_matches[i].split()\n\n #If previous term has same ending (\"Dinner\") as current term, remove it\n if possible_matches[i].split()[-1] == possible_matches[i - 1].split()[-1]:\n #8 = amount of characters taken up by [' during ']\n length = len(possible_matches[i].split()[-1]) + 8\n possible_matches[i - 1] = possible_matches[i - 1][:length*-1]\n\n return possible_matches\n\n\ndef find_matches(course_data, possible_matches, item_in, meal_name, requisites):\n \"\"\"Appends matches of specified food item in data of an individual course to\n list of possible matches.\n\n :param course_data: Chosen course subsection of MDining API HTTP response data\n :type course_data: dict\n :param possible_matches: List of food items in data that matched user input\n :type possible_matches: list\n :param item_in: User 
input food item\n :type item_in: string\n :param meal_name: Name of meal\n :type meal_name: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n\n traits = requisites['trait']\n allergens = requisites['allergens']\n\n item_data = []\n datatype = type(course_data)\n\n if datatype is list:\n item_data += course_data\n else:\n item_data.append(course_data)\n\n for item in item_data:\n if check_item_specifications(item, traits, allergens) == False:\n continue\n if item_in.upper() in item['name'].upper():\n if item['name'][-1] == ' ':\n item['name'] = item['name'][:-1]\n\n possible_matches.append(item['name'] + ' during ' + meal_name)\n\n return possible_matches\n\n\n\n#########################################################################\n###Primary Handler Functions\n\n\ndef request_location_and_meal(date_in, loc_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and meal entities from ``findLocationAndMeal`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param meal_in: Input meal\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n\n #preset vars\n url = 'http://api.studentlife.umich.edu/menu/xml2print.php?controller=&view=json'\n location = '&location='\n date = '&date='\n meal = '&meal='\n\n #API url concatenation\n location += loc_in\n meal += meal_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n\n #fetching json\n data = requests.get(url).json()\n\n #checking if specified meal available\n if check_meal_available(data, meal_in):\n returnstring = (get_items(data, requisites, False)).rstrip(', ')\n return format_plural(returnstring)\n else:\n return \"No meal is available\"\n\n#Handle meal item data request\ndef request_item(date_in, loc_in, item_in, meal_in, requisites):\n \"\"\"Handles searching for appropriate data response for valid specified\n location and food item entities (and meal entity if included) from ``findItem`` intent.\n\n :param date_in: Input date\n :type date_in: string\n :param loc_in: Input location\n :type loc_in: string\n :param item_in: Input food item\n :type item_in: string\n :param meal_in: Input meal, can be empty string if not specified\n :type meal_in: string\n :param requisites: Contains information food item must comply with (traits, allergens, etc)\n :type requisites: dict\n \"\"\"\n secrets = get_secrets()\n url = secrets.get('m_dining_api_main')\n location = '&location='\n date = '&date='\n meal = '&meal='\n\n #API url concatenation\n location += loc_in\n date += str(date_in)\n url = url + location + date + meal\n url = remove_spaces(url)\n\n if meal_in == '':\n meal_entered = False\n else:\n meal_entered = True\n\n #fetching json\n data = requests.get(url).json()\n\n possible_matches = []\n\n #Loop through meals\n for i in data['menu']['meal']:\n\n #If meal specified, only check specified meal\n if meal_entered and i['name'].upper() != meal_in.upper():\n continue\n #Skip meal if no food items available\n if 'course' not in i:\n continue\n\n #Loop through food items in course\n for j in i['course']:\n for key, value in j.items():\n if key == 'name':\n course_data = j['menuitem']\n meal_name = i['name']\n #Append matches to specified item to possible_matches list\n possible_matches = 
find_matches(course_data, possible_matches,\n item_in, meal_name, requisites)\n \n #Specified item found\n if possible_matches:\n possible_matches = find_item_formatting(possible_matches)\n text = 'Yes, there is '\n for i in range(len(possible_matches)):\n if len(possible_matches) > 1 and (i == len(possible_matches) - 1):\n text += ' and'\n text += ' ' + possible_matches[i]\n if i != len(possible_matches) - 1:\n text += ','\n\n #Specified item not found\n else:\n text = 'Sorry, that is not available'\n\n\n return {'fulfillmentText': text}\n",
"step-ids": [
10,
12,
13,
14,
15
]
}
|
[
10,
12,
13,
14,
15
] |
from django.db import models
from django.contrib.auth.models import User as sUser
TYPES = (
('public', 'public'),
('private', 'private'),
)
#class GroupManager(models.Manager):
# def get_all_users(self):
# return self.extra(where=['users'])
class Group(models.Model):
name = models.CharField(max_length=100, default='')
description = models.TextField()
owner = models.ForeignKey(sUser, related_name='my_own_groups')
users = models.ManyToManyField(sUser, related_name='my_groups')
type = models.CharField(max_length=7, choices=TYPES)
created_at = models.DateField(auto_now=True)
#objects =GroupManager()
def __unicode__(self):
return self.name
|
normal
|
{
"blob_id": "8baf61a20a64f296304b6a7017a24f1216e3d771",
"index": 2908,
"step-1": "<mask token>\n\n\nclass Group(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Group(models.Model):\n name = models.CharField(max_length=100, default='')\n description = models.TextField()\n owner = models.ForeignKey(sUser, related_name='my_own_groups')\n users = models.ManyToManyField(sUser, related_name='my_groups')\n type = models.CharField(max_length=7, choices=TYPES)\n created_at = models.DateField(auto_now=True)\n\n def __unicode__(self):\n return self.name\n",
"step-3": "<mask token>\nTYPES = ('public', 'public'), ('private', 'private')\n\n\nclass Group(models.Model):\n name = models.CharField(max_length=100, default='')\n description = models.TextField()\n owner = models.ForeignKey(sUser, related_name='my_own_groups')\n users = models.ManyToManyField(sUser, related_name='my_groups')\n type = models.CharField(max_length=7, choices=TYPES)\n created_at = models.DateField(auto_now=True)\n\n def __unicode__(self):\n return self.name\n",
"step-4": "from django.db import models\nfrom django.contrib.auth.models import User as sUser\nTYPES = ('public', 'public'), ('private', 'private')\n\n\nclass Group(models.Model):\n name = models.CharField(max_length=100, default='')\n description = models.TextField()\n owner = models.ForeignKey(sUser, related_name='my_own_groups')\n users = models.ManyToManyField(sUser, related_name='my_groups')\n type = models.CharField(max_length=7, choices=TYPES)\n created_at = models.DateField(auto_now=True)\n\n def __unicode__(self):\n return self.name\n",
"step-5": "from django.db import models\nfrom django.contrib.auth.models import User as sUser\n\nTYPES = (\n ('public', 'public'),\n ('private', 'private'),\n)\n\n#class GroupManager(models.Manager):\n# def get_all_users(self):\n# return self.extra(where=['users'])\n\nclass Group(models.Model):\n name = models.CharField(max_length=100, default='')\n description = models.TextField()\n owner = models.ForeignKey(sUser, related_name='my_own_groups')\n users = models.ManyToManyField(sUser, related_name='my_groups')\n type = models.CharField(max_length=7, choices=TYPES)\n created_at = models.DateField(auto_now=True)\n #objects =GroupManager()\n\n def __unicode__(self):\n return self.name",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
s1 = {10, 20, 30, 60, 70, 80, 90}
s2 = set()
print(s2)
s1.add(100)
print(s1.pop())
print(10 in s1)
print(10 not in s1)
|
normal
|
{
"blob_id": "3747e45dcba548060f25bd6d6f0e0e96091ca3df",
"index": 2358,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(s2)\ns1.add(100)\nprint(s1.pop())\nprint(10 in s1)\nprint(10 not in s1)\n",
"step-3": "s1 = {10, 20, 30, 60, 70, 80, 90}\ns2 = set()\nprint(s2)\ns1.add(100)\nprint(s1.pop())\nprint(10 in s1)\nprint(10 not in s1)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def emailpatient(firstname, lastname, password, otp, email, id):
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s %s and pass is %s and otp is %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
"""
% (firstname, lastname, password, otp, id))
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Email(doctorFullName, password, otp, email, id):
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s and pass is %s and %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
"""
% (doctorFullName, password, otp, id))
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
def emailpatient(firstname, lastname, password, otp, email, id):
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s %s and pass is %s and otp is %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
"""
% (firstname, lastname, password, otp, id))
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Email(doctorFullName, password, otp, email, id):
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s and pass is %s and %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
"""
% (doctorFullName, password, otp, id))
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
def emailpatient(firstname, lastname, password, otp, email, id):
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s %s and pass is %s and otp is %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
"""
% (firstname, lastname, password, otp, id))
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
def forgotPassword(otp, email, id):
email_subject = 'This is your new OTP'
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s Your Otp is %d </p>
<p>http://127.0.0.1:8000/forgetpwdvarification/%d<p>
</body>
</html>
"""
% (email, otp, id))
print(otp)
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
<|reserved_special_token_1|>
from django.utils.html import strip_tags
from django.core.mail import send_mail
from django.urls import reverse
from django.http import HttpResponseRedirect
def Email(doctorFullName, password, otp, email, id):
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s and pass is %s and %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
"""
% (doctorFullName, password, otp, id))
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
def emailpatient(firstname, lastname, password, otp, email, id):
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s %s and pass is %s and otp is %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
"""
% (firstname, lastname, password, otp, id))
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
def forgotPassword(otp, email, id):
email_subject = 'This is your new OTP'
print('\n== UTILS ===')
html_message = (
"""
<html>
<body>
<p>Welcome %s Your Otp is %d </p>
<p>http://127.0.0.1:8000/forgetpwdvarification/%d<p>
</body>
</html>
"""
% (email, otp, id))
print(otp)
plain_message = strip_tags(html_message)
send_mail('my subjects', plain_message,
'[email protected]', [email], html_message=html_message)
<|reserved_special_token_1|>
from django.utils.html import strip_tags
from django.core.mail import send_mail
from django.urls import reverse
from django.http import HttpResponseRedirect
def Email(doctorFullName,password,otp,email,id):
print("\n== UTILS ===")
html_message='''
<html>
<body>
<p>Welcome %s and pass is %s and %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
'''%(doctorFullName,password,otp,id)
plain_message =strip_tags(html_message)
send_mail("my subjects",plain_message,'[email protected]',[email],html_message=html_message)
def emailpatient(firstname,lastname,password,otp,email,id):
print("\n== UTILS ===")
html_message='''
<html>
<body>
<p>Welcome %s %s and pass is %s and otp is %d</p>
<p>http://127.0.0.1:8000/varificationpage/%d<p>
</body>
</html>
'''%(firstname,lastname,password,otp,id)
plain_message =strip_tags(html_message)
send_mail("my subjects",plain_message,'[email protected]',[email],html_message=html_message)
def forgotPassword(otp,email,id):
email_subject = "This is your new OTP"
print("\n== UTILS ===")
html_message='''
<html>
<body>
<p>Welcome %s Your Otp is %d </p>
<p>http://127.0.0.1:8000/forgetpwdvarification/%d<p>
</body>
</html>
'''%(email,otp,id)
print(otp)
plain_message =strip_tags(html_message)
send_mail("my subjects",plain_message,'[email protected]',[email],html_message=html_message)
# return HttpResponseRedirect(reverse(login))
# link = "https://localhost:8000/example?email="+email+"&otp="+otp+"&random="+random
# send_mail(email_subject, 'mail_template','[email protected]', [email], {'otp': otp})
|
flexible
|
{
"blob_id": "4ecf9c03750a31ecd113a7548df4e2a700e775e0",
"index": 4034,
"step-1": "<mask token>\n\n\ndef emailpatient(firstname, lastname, password, otp, email, id):\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s %s and pass is %s and otp is %d</p>\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\n </body>\n </html>\n \"\"\"\n % (firstname, lastname, password, otp, id))\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef Email(doctorFullName, password, otp, email, id):\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s and pass is %s and %d</p>\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\n </body>\n </html>\n \"\"\"\n % (doctorFullName, password, otp, id))\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n\n\ndef emailpatient(firstname, lastname, password, otp, email, id):\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s %s and pass is %s and otp is %d</p>\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\n </body>\n </html>\n \"\"\"\n % (firstname, lastname, password, otp, id))\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef Email(doctorFullName, password, otp, email, id):\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s and pass is %s and %d</p>\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\n </body>\n </html>\n \"\"\"\n % (doctorFullName, password, otp, id))\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n\n\ndef emailpatient(firstname, lastname, password, otp, email, id):\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s %s and pass is %s and otp is %d</p>\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\n </body>\n </html>\n \"\"\"\n % (firstname, lastname, password, otp, id))\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n\n\ndef forgotPassword(otp, email, id):\n email_subject = 'This is your new OTP'\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s Your Otp is %d </p>\n <p>http://127.0.0.1:8000/forgetpwdvarification/%d<p>\n </body>\n </html>\n \"\"\"\n % (email, otp, id))\n print(otp)\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n",
"step-4": "from django.utils.html import strip_tags\nfrom django.core.mail import send_mail\nfrom django.urls import reverse\nfrom django.http import HttpResponseRedirect\n\n\ndef Email(doctorFullName, password, otp, email, id):\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s and pass is %s and %d</p>\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\n </body>\n </html>\n \"\"\"\n % (doctorFullName, password, otp, id))\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n\n\ndef emailpatient(firstname, lastname, password, otp, email, id):\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s %s and pass is %s and otp is %d</p>\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\n </body>\n </html>\n \"\"\"\n % (firstname, lastname, password, otp, id))\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n\n\ndef forgotPassword(otp, email, id):\n email_subject = 'This is your new OTP'\n print('\\n== UTILS ===')\n html_message = (\n \"\"\"\n <html>\n <body>\n <p>Welcome %s Your Otp is %d </p>\n <p>http://127.0.0.1:8000/forgetpwdvarification/%d<p>\n </body>\n </html>\n \"\"\"\n % (email, otp, id))\n print(otp)\n plain_message = strip_tags(html_message)\n send_mail('my subjects', plain_message,\n '[email protected]', [email], html_message=html_message)\n",
"step-5": "from django.utils.html import strip_tags\r\nfrom django.core.mail import send_mail\r\nfrom django.urls import reverse\r\nfrom django.http import HttpResponseRedirect\r\n\r\ndef Email(doctorFullName,password,otp,email,id):\r\n print(\"\\n== UTILS ===\")\r\n html_message='''\r\n <html>\r\n <body>\r\n <p>Welcome %s and pass is %s and %d</p>\r\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\r\n </body>\r\n </html>\r\n '''%(doctorFullName,password,otp,id)\r\n plain_message =strip_tags(html_message)\r\n send_mail(\"my subjects\",plain_message,'[email protected]',[email],html_message=html_message)\r\ndef emailpatient(firstname,lastname,password,otp,email,id):\r\n print(\"\\n== UTILS ===\")\r\n html_message='''\r\n <html>\r\n <body>\r\n <p>Welcome %s %s and pass is %s and otp is %d</p>\r\n <p>http://127.0.0.1:8000/varificationpage/%d<p>\r\n </body>\r\n </html>\r\n '''%(firstname,lastname,password,otp,id)\r\n plain_message =strip_tags(html_message)\r\n send_mail(\"my subjects\",plain_message,'[email protected]',[email],html_message=html_message)\r\n\r\n \r\ndef forgotPassword(otp,email,id):\r\n email_subject = \"This is your new OTP\"\r\n print(\"\\n== UTILS ===\")\r\n html_message='''\r\n <html>\r\n <body>\r\n <p>Welcome %s Your Otp is %d </p>\r\n <p>http://127.0.0.1:8000/forgetpwdvarification/%d<p>\r\n </body>\r\n </html>\r\n '''%(email,otp,id)\r\n print(otp)\r\n plain_message =strip_tags(html_message)\r\n send_mail(\"my subjects\",plain_message,'[email protected]',[email],html_message=html_message)\r\n # return HttpResponseRedirect(reverse(login))\r\n # link = \"https://localhost:8000/example?email=\"+email+\"&otp=\"+otp+\"&random=\"+random\r\n # send_mail(email_subject, 'mail_template','[email protected]', [email], {'otp': otp})",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import os
import unittest
import logging
from collections import Counter
from utility import token_util
class TestFileReadingFunctions(unittest.TestCase):
def setUp(self):
self.data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
self.one_word_per_line_path = os.path.join(self.data_dir, "one_word_per_line.txt")
self.one_sent_per_line_path = os.path.join(self.data_dir, "one_sent_per_line.txt")
self.token2id_path = os.path.join(self.data_dir, "token2id.txt")
self.word_cnt_path_list = [self.one_sent_per_line_path, self.one_word_per_line_path]
self.logger = logging.getLogger("ReadingFunctions Test Logger")
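    # The fixture files themselves are not shown in this record; their assumed contents
    # are inferred from the assertions below: one_word_per_line.txt presumably holds one
    # token per line ("a_1" once, "b_2" twice, "c_3" three times, "d_4" four times),
    # one_sent_per_line.txt additionally holds "e_5" five times and "f_6" six times, and
    # token2id.txt presumably lists "a_0", "b_1", "c_2", "d_3", "UNK", one token per line.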
def test_token_cnt(self):
one_word_per_line_counter = Counter({"a_1": 1, "b_2": 2, "c_3": 3, "d_4": 4})
one_sent_per_line_counter = Counter({"a_1": 1, "b_2": 2, "c_3": 3, "d_4": 4, "e_5": 5, "f_6": 6})
c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path], separator=None, workers=1, parallel_mode="size")
self.assertEqual(c, one_word_per_line_counter)
c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path], separator=None, workers=3, parallel_mode="size")
self.assertEqual(c, one_word_per_line_counter)
c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path], separator=None, workers=1, parallel_mode="size")
self.assertEqual(c, one_sent_per_line_counter)
c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path], separator=None, workers=3, parallel_mode="size")
self.assertEqual(c, one_sent_per_line_counter)
c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=1, parallel_mode="size")
self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)
c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=3, parallel_mode="size")
self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)
c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=1, parallel_mode="file")
self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)
c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=3, parallel_mode="file")
self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)
def test_gen_token_id_from_file(self):
one_word_per_line_counter = Counter({"a_1": 1, "b_2": 2, "c_3": 3, "d_4": 4})
one_sent_per_line_counter = Counter({"a_1": 1, "b_2": 2, "c_3": 3, "d_4": 4, "e_5": 5, "f_6": 6})
res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter, min_cnt=-1, max_size=-1, separator=None)
self.assertEqual(res_list, ["f_6", "e_5", "d_4", "c_3", "b_2", "a_1"])
res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter, min_cnt=2, max_size=-1, separator=None)
self.assertEqual(res_list, ["f_6", "e_5", "d_4", "c_3"])
res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter, min_cnt=-1, max_size=2, separator=None)
self.assertEqual(res_list, ["f_6", "e_5"])
res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path], min_cnt=-1, max_size=-1, separator=None)
self.assertEqual(res_list, ["f_6", "e_5", "d_4", "c_3", "b_2", "a_1"])
res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path], min_cnt=2, max_size=-1, separator=None)
self.assertEqual(res_list, ["f_6", "e_5", "d_4", "c_3"])
res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path], min_cnt=-1, max_size=2, separator=None)
self.assertEqual(res_list, ["f_6", "e_5"])
res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path, self.one_word_per_line_path], min_cnt=2, max_size=-1, separator=None)
self.assertAlmostEqual(res_list, ["d_4", "f_6", "c_3", "e_5", "b_2"], delta=2)
res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path, self.one_word_per_line_path], min_cnt=-1, max_size=3, separator=None)
self.assertAlmostEqual(res_list, ["d_4", "f_6", "c_3"], delta=2)
def test_load_token_id(self):
token2id, id2token = token_util.load_token_id(self.token2id_path)
self.assertEqual(token2id, {"a_0": 0, "b_1": 1, "c_2": 2, "d_3": 3, "UNK": 4})
self.assertEqual(id2token, ["a_0", "b_1", "c_2", "d_3", "UNK"])
if __name__ == "__main__":
unittest.main()
|
normal
|
{
"blob_id": "7c3798aa9cc5424656572dfaa87f7acb961613eb",
"index": 8715,
"step-1": "<mask token>\n\n\nclass TestFileReadingFunctions(unittest.TestCase):\n\n def setUp(self):\n self.data_dir = os.path.join(os.path.dirname(os.path.realpath(\n __file__)), 'data')\n self.one_word_per_line_path = os.path.join(self.data_dir,\n 'one_word_per_line.txt')\n self.one_sent_per_line_path = os.path.join(self.data_dir,\n 'one_sent_per_line.txt')\n self.token2id_path = os.path.join(self.data_dir, 'token2id.txt')\n self.word_cnt_path_list = [self.one_sent_per_line_path, self.\n one_word_per_line_path]\n self.logger = logging.getLogger('ReadingFunctions Test Logger')\n\n def test_token_cnt(self):\n one_word_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4})\n one_sent_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4, 'e_5': 5, 'f_6': 6})\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n <mask token>\n\n def test_load_token_id(self):\n token2id, id2token = token_util.load_token_id(self.token2id_path)\n self.assertEqual(token2id, {'a_0': 0, 'b_1': 1, 'c_2': 2, 'd_3': 3,\n 'UNK': 4})\n self.assertEqual(id2token, ['a_0', 'b_1', 'c_2', 'd_3', 'UNK'])\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass TestFileReadingFunctions(unittest.TestCase):\n\n def setUp(self):\n self.data_dir = os.path.join(os.path.dirname(os.path.realpath(\n __file__)), 'data')\n self.one_word_per_line_path = os.path.join(self.data_dir,\n 'one_word_per_line.txt')\n self.one_sent_per_line_path = os.path.join(self.data_dir,\n 'one_sent_per_line.txt')\n self.token2id_path = os.path.join(self.data_dir, 'token2id.txt')\n self.word_cnt_path_list = [self.one_sent_per_line_path, self.\n one_word_per_line_path]\n self.logger = logging.getLogger('ReadingFunctions Test Logger')\n\n def test_token_cnt(self):\n one_word_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4})\n one_sent_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4, 'e_5': 5, 'f_6': 6})\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n\n def test_gen_token_id_from_file(self):\n one_word_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4})\n one_sent_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4, 'e_5': 5, 'f_6': 6})\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3', 'b_2', 'a_1'])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3'])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3', 'b_2', 'a_1'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3'])\n 
res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path, self.one_word_per_line_path], min_cnt=2,\n max_size=-1, separator=None)\n self.assertAlmostEqual(res_list, ['d_4', 'f_6', 'c_3', 'e_5', 'b_2'\n ], delta=2)\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path, self.one_word_per_line_path], min_cnt=-\n 1, max_size=3, separator=None)\n self.assertAlmostEqual(res_list, ['d_4', 'f_6', 'c_3'], delta=2)\n\n def test_load_token_id(self):\n token2id, id2token = token_util.load_token_id(self.token2id_path)\n self.assertEqual(token2id, {'a_0': 0, 'b_1': 1, 'c_2': 2, 'd_3': 3,\n 'UNK': 4})\n self.assertEqual(id2token, ['a_0', 'b_1', 'c_2', 'd_3', 'UNK'])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestFileReadingFunctions(unittest.TestCase):\n\n def setUp(self):\n self.data_dir = os.path.join(os.path.dirname(os.path.realpath(\n __file__)), 'data')\n self.one_word_per_line_path = os.path.join(self.data_dir,\n 'one_word_per_line.txt')\n self.one_sent_per_line_path = os.path.join(self.data_dir,\n 'one_sent_per_line.txt')\n self.token2id_path = os.path.join(self.data_dir, 'token2id.txt')\n self.word_cnt_path_list = [self.one_sent_per_line_path, self.\n one_word_per_line_path]\n self.logger = logging.getLogger('ReadingFunctions Test Logger')\n\n def test_token_cnt(self):\n one_word_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4})\n one_sent_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4, 'e_5': 5, 'f_6': 6})\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n\n def test_gen_token_id_from_file(self):\n one_word_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4})\n one_sent_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4, 'e_5': 5, 'f_6': 6})\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3', 'b_2', 'a_1'])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3'])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3', 'b_2', 'a_1'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3'])\n 
res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path, self.one_word_per_line_path], min_cnt=2,\n max_size=-1, separator=None)\n self.assertAlmostEqual(res_list, ['d_4', 'f_6', 'c_3', 'e_5', 'b_2'\n ], delta=2)\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path, self.one_word_per_line_path], min_cnt=-\n 1, max_size=3, separator=None)\n self.assertAlmostEqual(res_list, ['d_4', 'f_6', 'c_3'], delta=2)\n\n def test_load_token_id(self):\n token2id, id2token = token_util.load_token_id(self.token2id_path)\n self.assertEqual(token2id, {'a_0': 0, 'b_1': 1, 'c_2': 2, 'd_3': 3,\n 'UNK': 4})\n self.assertEqual(id2token, ['a_0', 'b_1', 'c_2', 'd_3', 'UNK'])\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "from __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nimport sys\nimport os\nimport unittest\nimport logging\nfrom collections import Counter\nfrom utility import token_util\n\n\nclass TestFileReadingFunctions(unittest.TestCase):\n\n def setUp(self):\n self.data_dir = os.path.join(os.path.dirname(os.path.realpath(\n __file__)), 'data')\n self.one_word_per_line_path = os.path.join(self.data_dir,\n 'one_word_per_line.txt')\n self.one_sent_per_line_path = os.path.join(self.data_dir,\n 'one_sent_per_line.txt')\n self.token2id_path = os.path.join(self.data_dir, 'token2id.txt')\n self.word_cnt_path_list = [self.one_sent_per_line_path, self.\n one_word_per_line_path]\n self.logger = logging.getLogger('ReadingFunctions Test Logger')\n\n def test_token_cnt(self):\n one_word_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4})\n one_sent_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4, 'e_5': 5, 'f_6': 6})\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=1, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path\n ], separator=None, workers=3, parallel_mode='size')\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='size')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=1,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path,\n self.one_sent_per_line_path], separator=None, workers=3,\n parallel_mode='file')\n self.assertEqual(c, one_word_per_line_counter +\n one_sent_per_line_counter)\n\n def test_gen_token_id_from_file(self):\n one_word_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4})\n one_sent_per_line_counter = Counter({'a_1': 1, 'b_2': 2, 'c_3': 3,\n 'd_4': 4, 'e_5': 5, 'f_6': 6})\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3', 'b_2', 'a_1'])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3'])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter,\n min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 
'e_5', 'd_4', 'c_3', 'b_2', 'a_1'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5', 'd_4', 'c_3'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path], min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, ['f_6', 'e_5'])\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path, self.one_word_per_line_path], min_cnt=2,\n max_size=-1, separator=None)\n self.assertAlmostEqual(res_list, ['d_4', 'f_6', 'c_3', 'e_5', 'b_2'\n ], delta=2)\n res_list = token_util.gen_token_id_from_file([self.\n one_sent_per_line_path, self.one_word_per_line_path], min_cnt=-\n 1, max_size=3, separator=None)\n self.assertAlmostEqual(res_list, ['d_4', 'f_6', 'c_3'], delta=2)\n\n def test_load_token_id(self):\n token2id, id2token = token_util.load_token_id(self.token2id_path)\n self.assertEqual(token2id, {'a_0': 0, 'b_1': 1, 'c_2': 2, 'd_3': 3,\n 'UNK': 4})\n self.assertEqual(id2token, ['a_0', 'b_1', 'c_2', 'd_3', 'UNK'])\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "#!/usr/bin/python\n# -*- coding: UTF-8 -*-\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport sys\nimport os\nimport unittest\nimport logging\nfrom collections import Counter\n\nfrom utility import token_util\n\n\nclass TestFileReadingFunctions(unittest.TestCase):\n def setUp(self):\n self.data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"data\")\n self.one_word_per_line_path = os.path.join(self.data_dir, \"one_word_per_line.txt\")\n self.one_sent_per_line_path = os.path.join(self.data_dir, \"one_sent_per_line.txt\")\n self.token2id_path = os.path.join(self.data_dir, \"token2id.txt\")\n self.word_cnt_path_list = [self.one_sent_per_line_path, self.one_word_per_line_path]\n\n self.logger = logging.getLogger(\"ReadingFunctions Test Logger\")\n\n def test_token_cnt(self):\n one_word_per_line_counter = Counter({\"a_1\": 1, \"b_2\": 2, \"c_3\": 3, \"d_4\": 4})\n one_sent_per_line_counter = Counter({\"a_1\": 1, \"b_2\": 2, \"c_3\": 3, \"d_4\": 4, \"e_5\": 5, \"f_6\": 6})\n\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path], separator=None, workers=1, parallel_mode=\"size\")\n self.assertEqual(c, one_word_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path], separator=None, workers=3, parallel_mode=\"size\")\n self.assertEqual(c, one_word_per_line_counter)\n\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path], separator=None, workers=1, parallel_mode=\"size\")\n self.assertEqual(c, one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_sent_per_line_path], separator=None, workers=3, parallel_mode=\"size\")\n self.assertEqual(c, one_sent_per_line_counter)\n\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=1, parallel_mode=\"size\")\n self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=3, parallel_mode=\"size\")\n self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)\n\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=1, parallel_mode=\"file\")\n self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)\n c = token_util.gen_token_cnt_from_file([self.one_word_per_line_path, self.one_sent_per_line_path], separator=None, workers=3, parallel_mode=\"file\")\n self.assertEqual(c, one_word_per_line_counter + one_sent_per_line_counter)\n\n def test_gen_token_id_from_file(self):\n one_word_per_line_counter = Counter({\"a_1\": 1, \"b_2\": 2, \"c_3\": 3, \"d_4\": 4})\n one_sent_per_line_counter = Counter({\"a_1\": 1, \"b_2\": 2, \"c_3\": 3, \"d_4\": 4, \"e_5\": 5, \"f_6\": 6})\n\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter, min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, [\"f_6\", \"e_5\", \"d_4\", \"c_3\", \"b_2\", \"a_1\"])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter, min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, [\"f_6\", \"e_5\", \"d_4\", \"c_3\"])\n res_list = token_util.gen_token_id_from_file(one_sent_per_line_counter, min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, [\"f_6\", \"e_5\"])\n\n res_list = 
token_util.gen_token_id_from_file([self.one_sent_per_line_path], min_cnt=-1, max_size=-1, separator=None)\n self.assertEqual(res_list, [\"f_6\", \"e_5\", \"d_4\", \"c_3\", \"b_2\", \"a_1\"])\n res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path], min_cnt=2, max_size=-1, separator=None)\n self.assertEqual(res_list, [\"f_6\", \"e_5\", \"d_4\", \"c_3\"])\n res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path], min_cnt=-1, max_size=2, separator=None)\n self.assertEqual(res_list, [\"f_6\", \"e_5\"])\n\n res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path, self.one_word_per_line_path], min_cnt=2, max_size=-1, separator=None)\n self.assertAlmostEqual(res_list, [\"d_4\", \"f_6\", \"c_3\", \"e_5\", \"b_2\"], delta=2)\n res_list = token_util.gen_token_id_from_file([self.one_sent_per_line_path, self.one_word_per_line_path], min_cnt=-1, max_size=3, separator=None)\n self.assertAlmostEqual(res_list, [\"d_4\", \"f_6\", \"c_3\"], delta=2)\n\n def test_load_token_id(self):\n token2id, id2token = token_util.load_token_id(self.token2id_path)\n self.assertEqual(token2id, {\"a_0\": 0, \"b_1\": 1, \"c_2\": 2, \"d_3\": 3, \"UNK\": 4})\n self.assertEqual(id2token, [\"a_0\", \"b_1\", \"c_2\", \"d_3\", \"UNK\"])\n\n\nif __name__ == \"__main__\":\n unittest.main()\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
import time
import argparse
import utils
from data_loader import DataLoader
from generate_model_predictions import sacrebleu_metric, compute_bleu
import tensorflow as tf
import os
import json
from transformer import create_masks
# Since the target sequences are padded, it is important
# to apply a padding mask when calculating the loss.
def loss_function(real, pred, loss_object, pad_token_id):
"""Calculates total loss containing cross entropy with padding ignored.
Args:
        real: Tensor of size [batch_size, length_labels]
        pred: Tensor of size [batch_size, length_logits, vocab_size]
loss_object: Cross entropy loss
pad_token_id: Pad token id to ignore
Returns:
A scalar float tensor for loss.
"""
mask = tf.math.logical_not(tf.math.equal(real, pad_token_id))
loss_ = loss_object(real, pred)
mask = tf.cast(mask, dtype=loss_.dtype)
loss_ *= mask
return tf.reduce_sum(loss_) / tf.reduce_sum(mask)
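# Illustrative sketch (not part of the training loop): with pad_token_id = 0 and
# real = [[7, 3, 0, 0]], the mask becomes [[1., 1., 0., 0.]], so only the two
# non-pad positions contribute to the averaged loss returned above.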
def train_step(model, loss_object, optimizer, inp, tar,
train_loss, train_accuracy, pad_token_id):
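    # Teacher forcing: the decoder input drops the last token and the target labels
    # drop the first, so the prediction at position t is scored against token t+1.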
tar_inp = tar[:, :-1]
tar_real = tar[:, 1:]
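    # Build the encoder padding mask, the combined look-ahead/padding mask for decoder
    # self-attention, and the padding mask for the encoder-decoder attention.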
enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp, tar_inp)
with tf.GradientTape() as tape:
# training=True is only needed if there are layers with different
# behavior during training versus inference (e.g. Dropout).
predictions, _ = model(inp, tar_inp,
True,
enc_padding_mask,
combined_mask,
dec_padding_mask)
loss = loss_function(tar_real, predictions, loss_object, pad_token_id)
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
train_loss(loss)
train_accuracy(tar_real, predictions)
def val_step(model, loss_object, inp, tar,
val_loss, val_accuracy, pad_token_id):
tar_inp = tar[:, :-1]
tar_real = tar[:, 1:]
enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp, tar_inp)
predictions, _ = model(inp, tar_inp,
False,
enc_padding_mask,
combined_mask,
dec_padding_mask)
loss = loss_function(tar_real, predictions, loss_object, pad_token_id)
val_loss(loss)
val_accuracy(tar_real, predictions)
def compute_bleu_score(transformer_model, dataset, user_config, tokenizer_tar, epoch):
inp_language = user_config["inp_language"]
target_language = user_config["target_language"]
checkpoint_path = user_config["transformer_checkpoint_path"]
val_aligned_path_tar = user_config["val_data_path_{}".format(target_language)]
pred_file_path = "../log/log_{}_{}/".format(inp_language, target_language) + checkpoint_path.split('/')[
-1] + "_epoch-" + str(epoch) + "_prediction_{}.txt".format(target_language)
sacrebleu_metric(transformer_model, pred_file_path, None,
tokenizer_tar, dataset,
tokenizer_tar.MAX_LENGTH)
print("-----------------------------")
compute_bleu(pred_file_path, val_aligned_path_tar, print_all_scores=False)
print("-----------------------------")
    # rebuild the prediction file name (checkpoint name and epoch are already embedded) for easy reference
new_path = "../log/log_{}_{}/".format(inp_language, target_language) + checkpoint_path.split('/')[
-1] + "_epoch-" + str(epoch) + "_prediction_{}".format(target_language) + ".txt"
    # note: new_path is currently identical to pred_file_path, so this rename is effectively a no-op
os.rename(pred_file_path, new_path)
print("Saved translated prediction at {}".format(new_path))
def do_training(user_config):
inp_language = user_config["inp_language"]
target_language = user_config["target_language"]
print("\n****Training model from {} to {}****\n".format(inp_language, target_language))
print("****Loading tokenizers****")
# load pre-trained tokenizer
tokenizer_inp, tokenizer_tar = utils.load_tokenizers(inp_language, target_language, user_config)
print("****Loading train dataset****")
# train data loader
train_aligned_path_inp = user_config["train_data_path_{}".format(inp_language)]
train_aligned_path_tar = user_config["train_data_path_{}".format(target_language)]
train_dataloader = DataLoader(user_config["transformer_batch_size"],
train_aligned_path_inp,
train_aligned_path_tar,
tokenizer_inp,
tokenizer_tar,
inp_language,
target_language,
True)
train_dataset = train_dataloader.get_data_loader()
print("****Loading val dataset****")
# val data loader
val_aligned_path_inp = user_config["val_data_path_{}".format(inp_language)]
val_aligned_path_tar = user_config["val_data_path_{}".format(target_language)]
val_dataloader = DataLoader(user_config["transformer_batch_size"] * 2, # for fast validation increase batch size
val_aligned_path_inp,
val_aligned_path_tar,
tokenizer_inp,
tokenizer_tar,
inp_language,
target_language,
False)
val_dataset = val_dataloader.get_data_loader()
# define loss and accuracy metrics
loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True, reduction='none')
train_loss = tf.keras.metrics.Mean(name='train_loss')
train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')
val_loss = tf.keras.metrics.Mean(name='val_loss')
val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='val_accuracy')
print("****Loading transformer model****")
# load model and optimizer
transformer_model, optimizer, ckpt_manager = \
utils.load_transformer_model(user_config, tokenizer_inp, tokenizer_tar)
epochs = user_config["transformer_epochs"]
print("\nTraining model now...")
for epoch in range(epochs):
print()
start = time.time()
train_loss.reset_states()
train_accuracy.reset_states()
val_loss.reset_states()
val_accuracy.reset_states()
        # inp -> source-language batch, tar -> target-language batch (set via config)
for (batch, (inp, tar, _)) in enumerate(train_dataset):
train_step(transformer_model, loss_object, optimizer, inp, tar,
train_loss, train_accuracy, pad_token_id=tokenizer_tar.pad_token_id)
if batch % 50 == 0:
print('Train: Epoch {} Batch {} Loss {:.4f} Accuracy {:.4f}'.format(
epoch + 1, batch, train_loss.result(), train_accuracy.result()))
if (batch + 1) % 2200 == 0:
                # inp -> source-language batch, tar -> target-language batch
for (_, (inp, tar, _)) in enumerate(val_dataset):
val_step(transformer_model, loss_object, inp, tar,
val_loss, val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)
print('Batch {}: Val Loss: {:.4f}, Val Accuracy: {:.4f}\n'.format(batch, val_loss.result(),
val_accuracy.result()))
if user_config["compute_bleu"]:
print("\nComputing BLEU at batch {}: ".format(batch))
compute_bleu_score(transformer_model, val_dataset, user_config, tokenizer_tar, batch * epoch + 1)
print("After {} epochs".format(epoch + 1))
print('Train Loss: {:.4f}, Train Accuracy: {:.4f}'.format(train_loss.result(), train_accuracy.result()))
        # inp -> source-language batch, tar -> target-language batch
for (batch, (inp, tar, _)) in enumerate(val_dataset):
val_step(transformer_model, loss_object, inp, tar,
val_loss, val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)
print('Val Loss: {:.4f}, Val Accuracy: {:.4f}'.format(val_loss.result(), val_accuracy.result()))
print('Time taken for training epoch {}: {} secs'.format(epoch + 1, time.time() - start))
# evaluate and save model every x-epochs
ckpt_save_path = ckpt_manager.save()
print('Saving checkpoint after epoch {} at {}'.format(epoch + 1, ckpt_save_path))
if user_config["compute_bleu"]:
print("\nComputing BLEU at epoch {}: ".format(epoch + 1))
compute_bleu_score(transformer_model, val_dataset, user_config, tokenizer_tar, epoch + 1)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--config", help="Configuration file containing training parameters", type=str)
args = parser.parse_args()
user_config = utils.load_file(args.config)
seed = user_config["random_seed"]
utils.set_seed(seed)
print(json.dumps(user_config, indent=2))
do_training(user_config)
if __name__ == "__main__":
main()
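# Example invocation (hypothetical file names):
#   python train_transformer.py --config ../config/en_fr_config.json
# The JSON config is expected to provide the keys read above, e.g. inp_language,
# target_language, random_seed, transformer_batch_size, transformer_epochs,
# compute_bleu, transformer_checkpoint_path, and the train/val data path keys.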
|
normal
|
{
"blob_id": "7613dde4f49044fbca13acad2dd75587ef68f477",
"index": 2903,
"step-1": "<mask token>\n\n\ndef loss_function(real, pred, loss_object, pad_token_id):\n \"\"\"Calculates total loss containing cross entropy with padding ignored.\n Args:\n real: Tensor of size [batch_size, length_logits, vocab_size]\n pred: Tensor of size [batch_size, length_labels]\n loss_object: Cross entropy loss\n pad_token_id: Pad token id to ignore\n Returns:\n A scalar float tensor for loss.\n \"\"\"\n mask = tf.math.logical_not(tf.math.equal(real, pad_token_id))\n loss_ = loss_object(real, pred)\n mask = tf.cast(mask, dtype=loss_.dtype)\n loss_ *= mask\n return tf.reduce_sum(loss_) / tf.reduce_sum(mask)\n\n\ndef train_step(model, loss_object, optimizer, inp, tar, train_loss,\n train_accuracy, pad_token_id):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n with tf.GradientTape() as tape:\n predictions, _ = model(inp, tar_inp, True, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n gradients = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n train_loss(loss)\n train_accuracy(tar_real, predictions)\n\n\ndef val_step(model, loss_object, inp, tar, val_loss, val_accuracy, pad_token_id\n ):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n predictions, _ = model(inp, tar_inp, False, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n val_loss(loss)\n val_accuracy(tar_real, predictions)\n\n\n<mask token>\n\n\ndef do_training(user_config):\n inp_language = user_config['inp_language']\n target_language = user_config['target_language']\n print('\\n****Training model from {} to {}****\\n'.format(inp_language,\n target_language))\n print('****Loading tokenizers****')\n tokenizer_inp, tokenizer_tar = utils.load_tokenizers(inp_language,\n target_language, user_config)\n print('****Loading train dataset****')\n train_aligned_path_inp = user_config['train_data_path_{}'.format(\n inp_language)]\n train_aligned_path_tar = user_config['train_data_path_{}'.format(\n target_language)]\n train_dataloader = DataLoader(user_config['transformer_batch_size'],\n train_aligned_path_inp, train_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, True)\n train_dataset = train_dataloader.get_data_loader()\n print('****Loading val dataset****')\n val_aligned_path_inp = user_config['val_data_path_{}'.format(inp_language)]\n val_aligned_path_tar = user_config['val_data_path_{}'.format(\n target_language)]\n val_dataloader = DataLoader(user_config['transformer_batch_size'] * 2,\n val_aligned_path_inp, val_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, False)\n val_dataset = val_dataloader.get_data_loader()\n loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits\n =True, reduction='none')\n train_loss = tf.keras.metrics.Mean(name='train_loss')\n train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'train_accuracy')\n val_loss = tf.keras.metrics.Mean(name='val_loss')\n val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'val_accuracy')\n print('****Loading transformer model****')\n transformer_model, optimizer, ckpt_manager = utils.load_transformer_model(\n user_config, tokenizer_inp, tokenizer_tar)\n epochs = 
user_config['transformer_epochs']\n print('\\nTraining model now...')\n for epoch in range(epochs):\n print()\n start = time.time()\n train_loss.reset_states()\n train_accuracy.reset_states()\n val_loss.reset_states()\n val_accuracy.reset_states()\n for batch, (inp, tar, _) in enumerate(train_dataset):\n train_step(transformer_model, loss_object, optimizer, inp, tar,\n train_loss, train_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n if batch % 50 == 0:\n print('Train: Epoch {} Batch {} Loss {:.4f} Accuracy {:.4f}'\n .format(epoch + 1, batch, train_loss.result(),\n train_accuracy.result()))\n if (batch + 1) % 2200 == 0:\n for _, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar,\n val_loss, val_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n print('Batch {}: Val Loss: {:.4f}, Val Accuracy: {:.4f}\\n'.\n format(batch, val_loss.result(), val_accuracy.result()))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at batch {}: '.format(batch))\n compute_bleu_score(transformer_model, val_dataset,\n user_config, tokenizer_tar, batch * epoch + 1)\n print('After {} epochs'.format(epoch + 1))\n print('Train Loss: {:.4f}, Train Accuracy: {:.4f}'.format(\n train_loss.result(), train_accuracy.result()))\n for batch, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar, val_loss,\n val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)\n print('Val Loss: {:.4f}, Val Accuracy: {:.4f}'.format(val_loss.\n result(), val_accuracy.result()))\n print('Time taken for training epoch {}: {} secs'.format(epoch + 1,\n time.time() - start))\n ckpt_save_path = ckpt_manager.save()\n print('Saving checkpoint after epoch {} at {}'.format(epoch + 1,\n ckpt_save_path))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at epoch {}: '.format(epoch + 1))\n compute_bleu_score(transformer_model, val_dataset, user_config,\n tokenizer_tar, epoch + 1)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--config', help=\n 'Configuration file containing training parameters', type=str)\n args = parser.parse_args()\n user_config = utils.load_file(args.config)\n seed = user_config['random_seed']\n utils.set_seed(seed)\n print(json.dumps(user_config, indent=2))\n do_training(user_config)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef loss_function(real, pred, loss_object, pad_token_id):\n \"\"\"Calculates total loss containing cross entropy with padding ignored.\n Args:\n real: Tensor of size [batch_size, length_logits, vocab_size]\n pred: Tensor of size [batch_size, length_labels]\n loss_object: Cross entropy loss\n pad_token_id: Pad token id to ignore\n Returns:\n A scalar float tensor for loss.\n \"\"\"\n mask = tf.math.logical_not(tf.math.equal(real, pad_token_id))\n loss_ = loss_object(real, pred)\n mask = tf.cast(mask, dtype=loss_.dtype)\n loss_ *= mask\n return tf.reduce_sum(loss_) / tf.reduce_sum(mask)\n\n\ndef train_step(model, loss_object, optimizer, inp, tar, train_loss,\n train_accuracy, pad_token_id):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n with tf.GradientTape() as tape:\n predictions, _ = model(inp, tar_inp, True, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n gradients = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n train_loss(loss)\n train_accuracy(tar_real, predictions)\n\n\ndef val_step(model, loss_object, inp, tar, val_loss, val_accuracy, pad_token_id\n ):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n predictions, _ = model(inp, tar_inp, False, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n val_loss(loss)\n val_accuracy(tar_real, predictions)\n\n\ndef compute_bleu_score(transformer_model, dataset, user_config,\n tokenizer_tar, epoch):\n inp_language = user_config['inp_language']\n target_language = user_config['target_language']\n checkpoint_path = user_config['transformer_checkpoint_path']\n val_aligned_path_tar = user_config['val_data_path_{}'.format(\n target_language)]\n pred_file_path = '../log/log_{}_{}/'.format(inp_language, target_language\n ) + checkpoint_path.split('/')[-1] + '_epoch-' + str(epoch\n ) + '_prediction_{}.txt'.format(target_language)\n sacrebleu_metric(transformer_model, pred_file_path, None, tokenizer_tar,\n dataset, tokenizer_tar.MAX_LENGTH)\n print('-----------------------------')\n compute_bleu(pred_file_path, val_aligned_path_tar, print_all_scores=False)\n print('-----------------------------')\n new_path = '../log/log_{}_{}/'.format(inp_language, target_language\n ) + checkpoint_path.split('/')[-1] + '_epoch-' + str(epoch\n ) + '_prediction_{}'.format(target_language) + '.txt'\n os.rename(pred_file_path, new_path)\n print('Saved translated prediction at {}'.format(new_path))\n\n\ndef do_training(user_config):\n inp_language = user_config['inp_language']\n target_language = user_config['target_language']\n print('\\n****Training model from {} to {}****\\n'.format(inp_language,\n target_language))\n print('****Loading tokenizers****')\n tokenizer_inp, tokenizer_tar = utils.load_tokenizers(inp_language,\n target_language, user_config)\n print('****Loading train dataset****')\n train_aligned_path_inp = user_config['train_data_path_{}'.format(\n inp_language)]\n train_aligned_path_tar = user_config['train_data_path_{}'.format(\n target_language)]\n train_dataloader = DataLoader(user_config['transformer_batch_size'],\n train_aligned_path_inp, train_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, True)\n 
train_dataset = train_dataloader.get_data_loader()\n print('****Loading val dataset****')\n val_aligned_path_inp = user_config['val_data_path_{}'.format(inp_language)]\n val_aligned_path_tar = user_config['val_data_path_{}'.format(\n target_language)]\n val_dataloader = DataLoader(user_config['transformer_batch_size'] * 2,\n val_aligned_path_inp, val_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, False)\n val_dataset = val_dataloader.get_data_loader()\n loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits\n =True, reduction='none')\n train_loss = tf.keras.metrics.Mean(name='train_loss')\n train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'train_accuracy')\n val_loss = tf.keras.metrics.Mean(name='val_loss')\n val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'val_accuracy')\n print('****Loading transformer model****')\n transformer_model, optimizer, ckpt_manager = utils.load_transformer_model(\n user_config, tokenizer_inp, tokenizer_tar)\n epochs = user_config['transformer_epochs']\n print('\\nTraining model now...')\n for epoch in range(epochs):\n print()\n start = time.time()\n train_loss.reset_states()\n train_accuracy.reset_states()\n val_loss.reset_states()\n val_accuracy.reset_states()\n for batch, (inp, tar, _) in enumerate(train_dataset):\n train_step(transformer_model, loss_object, optimizer, inp, tar,\n train_loss, train_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n if batch % 50 == 0:\n print('Train: Epoch {} Batch {} Loss {:.4f} Accuracy {:.4f}'\n .format(epoch + 1, batch, train_loss.result(),\n train_accuracy.result()))\n if (batch + 1) % 2200 == 0:\n for _, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar,\n val_loss, val_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n print('Batch {}: Val Loss: {:.4f}, Val Accuracy: {:.4f}\\n'.\n format(batch, val_loss.result(), val_accuracy.result()))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at batch {}: '.format(batch))\n compute_bleu_score(transformer_model, val_dataset,\n user_config, tokenizer_tar, batch * epoch + 1)\n print('After {} epochs'.format(epoch + 1))\n print('Train Loss: {:.4f}, Train Accuracy: {:.4f}'.format(\n train_loss.result(), train_accuracy.result()))\n for batch, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar, val_loss,\n val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)\n print('Val Loss: {:.4f}, Val Accuracy: {:.4f}'.format(val_loss.\n result(), val_accuracy.result()))\n print('Time taken for training epoch {}: {} secs'.format(epoch + 1,\n time.time() - start))\n ckpt_save_path = ckpt_manager.save()\n print('Saving checkpoint after epoch {} at {}'.format(epoch + 1,\n ckpt_save_path))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at epoch {}: '.format(epoch + 1))\n compute_bleu_score(transformer_model, val_dataset, user_config,\n tokenizer_tar, epoch + 1)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--config', help=\n 'Configuration file containing training parameters', type=str)\n args = parser.parse_args()\n user_config = utils.load_file(args.config)\n seed = user_config['random_seed']\n utils.set_seed(seed)\n print(json.dumps(user_config, indent=2))\n do_training(user_config)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef loss_function(real, pred, loss_object, pad_token_id):\n \"\"\"Calculates total loss containing cross entropy with padding ignored.\n Args:\n real: Tensor of size [batch_size, length_logits, vocab_size]\n pred: Tensor of size [batch_size, length_labels]\n loss_object: Cross entropy loss\n pad_token_id: Pad token id to ignore\n Returns:\n A scalar float tensor for loss.\n \"\"\"\n mask = tf.math.logical_not(tf.math.equal(real, pad_token_id))\n loss_ = loss_object(real, pred)\n mask = tf.cast(mask, dtype=loss_.dtype)\n loss_ *= mask\n return tf.reduce_sum(loss_) / tf.reduce_sum(mask)\n\n\ndef train_step(model, loss_object, optimizer, inp, tar, train_loss,\n train_accuracy, pad_token_id):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n with tf.GradientTape() as tape:\n predictions, _ = model(inp, tar_inp, True, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n gradients = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n train_loss(loss)\n train_accuracy(tar_real, predictions)\n\n\ndef val_step(model, loss_object, inp, tar, val_loss, val_accuracy, pad_token_id\n ):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n predictions, _ = model(inp, tar_inp, False, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n val_loss(loss)\n val_accuracy(tar_real, predictions)\n\n\ndef compute_bleu_score(transformer_model, dataset, user_config,\n tokenizer_tar, epoch):\n inp_language = user_config['inp_language']\n target_language = user_config['target_language']\n checkpoint_path = user_config['transformer_checkpoint_path']\n val_aligned_path_tar = user_config['val_data_path_{}'.format(\n target_language)]\n pred_file_path = '../log/log_{}_{}/'.format(inp_language, target_language\n ) + checkpoint_path.split('/')[-1] + '_epoch-' + str(epoch\n ) + '_prediction_{}.txt'.format(target_language)\n sacrebleu_metric(transformer_model, pred_file_path, None, tokenizer_tar,\n dataset, tokenizer_tar.MAX_LENGTH)\n print('-----------------------------')\n compute_bleu(pred_file_path, val_aligned_path_tar, print_all_scores=False)\n print('-----------------------------')\n new_path = '../log/log_{}_{}/'.format(inp_language, target_language\n ) + checkpoint_path.split('/')[-1] + '_epoch-' + str(epoch\n ) + '_prediction_{}'.format(target_language) + '.txt'\n os.rename(pred_file_path, new_path)\n print('Saved translated prediction at {}'.format(new_path))\n\n\ndef do_training(user_config):\n inp_language = user_config['inp_language']\n target_language = user_config['target_language']\n print('\\n****Training model from {} to {}****\\n'.format(inp_language,\n target_language))\n print('****Loading tokenizers****')\n tokenizer_inp, tokenizer_tar = utils.load_tokenizers(inp_language,\n target_language, user_config)\n print('****Loading train dataset****')\n train_aligned_path_inp = user_config['train_data_path_{}'.format(\n inp_language)]\n train_aligned_path_tar = user_config['train_data_path_{}'.format(\n target_language)]\n train_dataloader = DataLoader(user_config['transformer_batch_size'],\n train_aligned_path_inp, train_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, True)\n 
train_dataset = train_dataloader.get_data_loader()\n print('****Loading val dataset****')\n val_aligned_path_inp = user_config['val_data_path_{}'.format(inp_language)]\n val_aligned_path_tar = user_config['val_data_path_{}'.format(\n target_language)]\n val_dataloader = DataLoader(user_config['transformer_batch_size'] * 2,\n val_aligned_path_inp, val_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, False)\n val_dataset = val_dataloader.get_data_loader()\n loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits\n =True, reduction='none')\n train_loss = tf.keras.metrics.Mean(name='train_loss')\n train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'train_accuracy')\n val_loss = tf.keras.metrics.Mean(name='val_loss')\n val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'val_accuracy')\n print('****Loading transformer model****')\n transformer_model, optimizer, ckpt_manager = utils.load_transformer_model(\n user_config, tokenizer_inp, tokenizer_tar)\n epochs = user_config['transformer_epochs']\n print('\\nTraining model now...')\n for epoch in range(epochs):\n print()\n start = time.time()\n train_loss.reset_states()\n train_accuracy.reset_states()\n val_loss.reset_states()\n val_accuracy.reset_states()\n for batch, (inp, tar, _) in enumerate(train_dataset):\n train_step(transformer_model, loss_object, optimizer, inp, tar,\n train_loss, train_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n if batch % 50 == 0:\n print('Train: Epoch {} Batch {} Loss {:.4f} Accuracy {:.4f}'\n .format(epoch + 1, batch, train_loss.result(),\n train_accuracy.result()))\n if (batch + 1) % 2200 == 0:\n for _, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar,\n val_loss, val_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n print('Batch {}: Val Loss: {:.4f}, Val Accuracy: {:.4f}\\n'.\n format(batch, val_loss.result(), val_accuracy.result()))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at batch {}: '.format(batch))\n compute_bleu_score(transformer_model, val_dataset,\n user_config, tokenizer_tar, batch * epoch + 1)\n print('After {} epochs'.format(epoch + 1))\n print('Train Loss: {:.4f}, Train Accuracy: {:.4f}'.format(\n train_loss.result(), train_accuracy.result()))\n for batch, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar, val_loss,\n val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)\n print('Val Loss: {:.4f}, Val Accuracy: {:.4f}'.format(val_loss.\n result(), val_accuracy.result()))\n print('Time taken for training epoch {}: {} secs'.format(epoch + 1,\n time.time() - start))\n ckpt_save_path = ckpt_manager.save()\n print('Saving checkpoint after epoch {} at {}'.format(epoch + 1,\n ckpt_save_path))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at epoch {}: '.format(epoch + 1))\n compute_bleu_score(transformer_model, val_dataset, user_config,\n tokenizer_tar, epoch + 1)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--config', help=\n 'Configuration file containing training parameters', type=str)\n args = parser.parse_args()\n user_config = utils.load_file(args.config)\n seed = user_config['random_seed']\n utils.set_seed(seed)\n print(json.dumps(user_config, indent=2))\n do_training(user_config)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import time\nimport argparse\nimport utils\nfrom data_loader import DataLoader\nfrom generate_model_predictions import sacrebleu_metric, compute_bleu\nimport tensorflow as tf\nimport os\nimport json\nfrom transformer import create_masks\n\n\ndef loss_function(real, pred, loss_object, pad_token_id):\n \"\"\"Calculates total loss containing cross entropy with padding ignored.\n Args:\n real: Tensor of size [batch_size, length_logits, vocab_size]\n pred: Tensor of size [batch_size, length_labels]\n loss_object: Cross entropy loss\n pad_token_id: Pad token id to ignore\n Returns:\n A scalar float tensor for loss.\n \"\"\"\n mask = tf.math.logical_not(tf.math.equal(real, pad_token_id))\n loss_ = loss_object(real, pred)\n mask = tf.cast(mask, dtype=loss_.dtype)\n loss_ *= mask\n return tf.reduce_sum(loss_) / tf.reduce_sum(mask)\n\n\ndef train_step(model, loss_object, optimizer, inp, tar, train_loss,\n train_accuracy, pad_token_id):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n with tf.GradientTape() as tape:\n predictions, _ = model(inp, tar_inp, True, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n gradients = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n train_loss(loss)\n train_accuracy(tar_real, predictions)\n\n\ndef val_step(model, loss_object, inp, tar, val_loss, val_accuracy, pad_token_id\n ):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp,\n tar_inp)\n predictions, _ = model(inp, tar_inp, False, enc_padding_mask,\n combined_mask, dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n val_loss(loss)\n val_accuracy(tar_real, predictions)\n\n\ndef compute_bleu_score(transformer_model, dataset, user_config,\n tokenizer_tar, epoch):\n inp_language = user_config['inp_language']\n target_language = user_config['target_language']\n checkpoint_path = user_config['transformer_checkpoint_path']\n val_aligned_path_tar = user_config['val_data_path_{}'.format(\n target_language)]\n pred_file_path = '../log/log_{}_{}/'.format(inp_language, target_language\n ) + checkpoint_path.split('/')[-1] + '_epoch-' + str(epoch\n ) + '_prediction_{}.txt'.format(target_language)\n sacrebleu_metric(transformer_model, pred_file_path, None, tokenizer_tar,\n dataset, tokenizer_tar.MAX_LENGTH)\n print('-----------------------------')\n compute_bleu(pred_file_path, val_aligned_path_tar, print_all_scores=False)\n print('-----------------------------')\n new_path = '../log/log_{}_{}/'.format(inp_language, target_language\n ) + checkpoint_path.split('/')[-1] + '_epoch-' + str(epoch\n ) + '_prediction_{}'.format(target_language) + '.txt'\n os.rename(pred_file_path, new_path)\n print('Saved translated prediction at {}'.format(new_path))\n\n\ndef do_training(user_config):\n inp_language = user_config['inp_language']\n target_language = user_config['target_language']\n print('\\n****Training model from {} to {}****\\n'.format(inp_language,\n target_language))\n print('****Loading tokenizers****')\n tokenizer_inp, tokenizer_tar = utils.load_tokenizers(inp_language,\n target_language, user_config)\n print('****Loading train dataset****')\n train_aligned_path_inp = user_config['train_data_path_{}'.format(\n inp_language)]\n train_aligned_path_tar = 
user_config['train_data_path_{}'.format(\n target_language)]\n train_dataloader = DataLoader(user_config['transformer_batch_size'],\n train_aligned_path_inp, train_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, True)\n train_dataset = train_dataloader.get_data_loader()\n print('****Loading val dataset****')\n val_aligned_path_inp = user_config['val_data_path_{}'.format(inp_language)]\n val_aligned_path_tar = user_config['val_data_path_{}'.format(\n target_language)]\n val_dataloader = DataLoader(user_config['transformer_batch_size'] * 2,\n val_aligned_path_inp, val_aligned_path_tar, tokenizer_inp,\n tokenizer_tar, inp_language, target_language, False)\n val_dataset = val_dataloader.get_data_loader()\n loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits\n =True, reduction='none')\n train_loss = tf.keras.metrics.Mean(name='train_loss')\n train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'train_accuracy')\n val_loss = tf.keras.metrics.Mean(name='val_loss')\n val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name=\n 'val_accuracy')\n print('****Loading transformer model****')\n transformer_model, optimizer, ckpt_manager = utils.load_transformer_model(\n user_config, tokenizer_inp, tokenizer_tar)\n epochs = user_config['transformer_epochs']\n print('\\nTraining model now...')\n for epoch in range(epochs):\n print()\n start = time.time()\n train_loss.reset_states()\n train_accuracy.reset_states()\n val_loss.reset_states()\n val_accuracy.reset_states()\n for batch, (inp, tar, _) in enumerate(train_dataset):\n train_step(transformer_model, loss_object, optimizer, inp, tar,\n train_loss, train_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n if batch % 50 == 0:\n print('Train: Epoch {} Batch {} Loss {:.4f} Accuracy {:.4f}'\n .format(epoch + 1, batch, train_loss.result(),\n train_accuracy.result()))\n if (batch + 1) % 2200 == 0:\n for _, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar,\n val_loss, val_accuracy, pad_token_id=tokenizer_tar.\n pad_token_id)\n print('Batch {}: Val Loss: {:.4f}, Val Accuracy: {:.4f}\\n'.\n format(batch, val_loss.result(), val_accuracy.result()))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at batch {}: '.format(batch))\n compute_bleu_score(transformer_model, val_dataset,\n user_config, tokenizer_tar, batch * epoch + 1)\n print('After {} epochs'.format(epoch + 1))\n print('Train Loss: {:.4f}, Train Accuracy: {:.4f}'.format(\n train_loss.result(), train_accuracy.result()))\n for batch, (inp, tar, _) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar, val_loss,\n val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)\n print('Val Loss: {:.4f}, Val Accuracy: {:.4f}'.format(val_loss.\n result(), val_accuracy.result()))\n print('Time taken for training epoch {}: {} secs'.format(epoch + 1,\n time.time() - start))\n ckpt_save_path = ckpt_manager.save()\n print('Saving checkpoint after epoch {} at {}'.format(epoch + 1,\n ckpt_save_path))\n if user_config['compute_bleu']:\n print('\\nComputing BLEU at epoch {}: '.format(epoch + 1))\n compute_bleu_score(transformer_model, val_dataset, user_config,\n tokenizer_tar, epoch + 1)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('--config', help=\n 'Configuration file containing training parameters', type=str)\n args = parser.parse_args()\n user_config = utils.load_file(args.config)\n seed = user_config['random_seed']\n 
utils.set_seed(seed)\n print(json.dumps(user_config, indent=2))\n do_training(user_config)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "import time\nimport argparse\nimport utils\nfrom data_loader import DataLoader\nfrom generate_model_predictions import sacrebleu_metric, compute_bleu\nimport tensorflow as tf\nimport os\nimport json\nfrom transformer import create_masks\n\n\n# Since the target sequences are padded, it is important\n# to apply a padding mask when calculating the loss.\ndef loss_function(real, pred, loss_object, pad_token_id):\n \"\"\"Calculates total loss containing cross entropy with padding ignored.\n Args:\n real: Tensor of size [batch_size, length_logits, vocab_size]\n pred: Tensor of size [batch_size, length_labels]\n loss_object: Cross entropy loss\n pad_token_id: Pad token id to ignore\n Returns:\n A scalar float tensor for loss.\n \"\"\"\n mask = tf.math.logical_not(tf.math.equal(real, pad_token_id))\n loss_ = loss_object(real, pred)\n mask = tf.cast(mask, dtype=loss_.dtype)\n loss_ *= mask\n return tf.reduce_sum(loss_) / tf.reduce_sum(mask)\n\n\ndef train_step(model, loss_object, optimizer, inp, tar,\n train_loss, train_accuracy, pad_token_id):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp, tar_inp)\n\n with tf.GradientTape() as tape:\n # training=True is only needed if there are layers with different\n # behavior during training versus inference (e.g. Dropout).\n predictions, _ = model(inp, tar_inp,\n True,\n enc_padding_mask,\n combined_mask,\n dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n\n gradients = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(gradients, model.trainable_variables))\n\n train_loss(loss)\n train_accuracy(tar_real, predictions)\n\n\ndef val_step(model, loss_object, inp, tar,\n val_loss, val_accuracy, pad_token_id):\n tar_inp = tar[:, :-1]\n tar_real = tar[:, 1:]\n\n enc_padding_mask, combined_mask, dec_padding_mask = create_masks(inp, tar_inp)\n\n predictions, _ = model(inp, tar_inp,\n False,\n enc_padding_mask,\n combined_mask,\n dec_padding_mask)\n loss = loss_function(tar_real, predictions, loss_object, pad_token_id)\n\n val_loss(loss)\n val_accuracy(tar_real, predictions)\n\n\ndef compute_bleu_score(transformer_model, dataset, user_config, tokenizer_tar, epoch):\n inp_language = user_config[\"inp_language\"]\n target_language = user_config[\"target_language\"]\n checkpoint_path = user_config[\"transformer_checkpoint_path\"]\n val_aligned_path_tar = user_config[\"val_data_path_{}\".format(target_language)]\n pred_file_path = \"../log/log_{}_{}/\".format(inp_language, target_language) + checkpoint_path.split('/')[\n -1] + \"_epoch-\" + str(epoch) + \"_prediction_{}.txt\".format(target_language)\n\n sacrebleu_metric(transformer_model, pred_file_path, None,\n tokenizer_tar, dataset,\n tokenizer_tar.MAX_LENGTH)\n print(\"-----------------------------\")\n compute_bleu(pred_file_path, val_aligned_path_tar, print_all_scores=False)\n print(\"-----------------------------\")\n\n # append checkpoint and score to file name for easy reference\n new_path = \"../log/log_{}_{}/\".format(inp_language, target_language) + checkpoint_path.split('/')[\n -1] + \"_epoch-\" + str(epoch) + \"_prediction_{}\".format(target_language) + \".txt\"\n # append score and checkpoint name to file_name\n os.rename(pred_file_path, new_path)\n print(\"Saved translated prediction at {}\".format(new_path))\n\n\ndef do_training(user_config):\n inp_language = user_config[\"inp_language\"]\n target_language = user_config[\"target_language\"]\n\n 
print(\"\\n****Training model from {} to {}****\\n\".format(inp_language, target_language))\n\n print(\"****Loading tokenizers****\")\n # load pre-trained tokenizer\n tokenizer_inp, tokenizer_tar = utils.load_tokenizers(inp_language, target_language, user_config)\n\n print(\"****Loading train dataset****\")\n # train data loader\n train_aligned_path_inp = user_config[\"train_data_path_{}\".format(inp_language)]\n train_aligned_path_tar = user_config[\"train_data_path_{}\".format(target_language)]\n train_dataloader = DataLoader(user_config[\"transformer_batch_size\"],\n train_aligned_path_inp,\n train_aligned_path_tar,\n tokenizer_inp,\n tokenizer_tar,\n inp_language,\n target_language,\n True)\n train_dataset = train_dataloader.get_data_loader()\n\n print(\"****Loading val dataset****\")\n # val data loader\n val_aligned_path_inp = user_config[\"val_data_path_{}\".format(inp_language)]\n val_aligned_path_tar = user_config[\"val_data_path_{}\".format(target_language)]\n val_dataloader = DataLoader(user_config[\"transformer_batch_size\"] * 2, # for fast validation increase batch size\n val_aligned_path_inp,\n val_aligned_path_tar,\n tokenizer_inp,\n tokenizer_tar,\n inp_language,\n target_language,\n False)\n val_dataset = val_dataloader.get_data_loader()\n\n # define loss and accuracy metrics\n loss_object = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True, reduction='none')\n train_loss = tf.keras.metrics.Mean(name='train_loss')\n train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')\n val_loss = tf.keras.metrics.Mean(name='val_loss')\n val_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='val_accuracy')\n\n print(\"****Loading transformer model****\")\n # load model and optimizer\n transformer_model, optimizer, ckpt_manager = \\\n utils.load_transformer_model(user_config, tokenizer_inp, tokenizer_tar)\n\n epochs = user_config[\"transformer_epochs\"]\n print(\"\\nTraining model now...\")\n for epoch in range(epochs):\n print()\n start = time.time()\n train_loss.reset_states()\n train_accuracy.reset_states()\n val_loss.reset_states()\n val_accuracy.reset_states()\n\n # inp -> english, tar -> french\n for (batch, (inp, tar, _)) in enumerate(train_dataset):\n train_step(transformer_model, loss_object, optimizer, inp, tar,\n train_loss, train_accuracy, pad_token_id=tokenizer_tar.pad_token_id)\n\n if batch % 50 == 0:\n print('Train: Epoch {} Batch {} Loss {:.4f} Accuracy {:.4f}'.format(\n epoch + 1, batch, train_loss.result(), train_accuracy.result()))\n\n if (batch + 1) % 2200 == 0:\n # inp -> english, tar -> french\n for (_, (inp, tar, _)) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar,\n val_loss, val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)\n print('Batch {}: Val Loss: {:.4f}, Val Accuracy: {:.4f}\\n'.format(batch, val_loss.result(),\n val_accuracy.result()))\n if user_config[\"compute_bleu\"]:\n print(\"\\nComputing BLEU at batch {}: \".format(batch))\n compute_bleu_score(transformer_model, val_dataset, user_config, tokenizer_tar, batch * epoch + 1)\n\n print(\"After {} epochs\".format(epoch + 1))\n print('Train Loss: {:.4f}, Train Accuracy: {:.4f}'.format(train_loss.result(), train_accuracy.result()))\n\n # inp -> english, tar -> french\n for (batch, (inp, tar, _)) in enumerate(val_dataset):\n val_step(transformer_model, loss_object, inp, tar,\n val_loss, val_accuracy, pad_token_id=tokenizer_tar.pad_token_id)\n print('Val Loss: {:.4f}, Val Accuracy: {:.4f}'.format(val_loss.result(), 
val_accuracy.result()))\n\n print('Time taken for training epoch {}: {} secs'.format(epoch + 1, time.time() - start))\n\n # evaluate and save model every x-epochs\n ckpt_save_path = ckpt_manager.save()\n print('Saving checkpoint after epoch {} at {}'.format(epoch + 1, ckpt_save_path))\n if user_config[\"compute_bleu\"]:\n print(\"\\nComputing BLEU at epoch {}: \".format(epoch + 1))\n compute_bleu_score(transformer_model, val_dataset, user_config, tokenizer_tar, epoch + 1)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--config\", help=\"Configuration file containing training parameters\", type=str)\n args = parser.parse_args()\n user_config = utils.load_file(args.config)\n seed = user_config[\"random_seed\"]\n utils.set_seed(seed)\n print(json.dumps(user_config, indent=2))\n do_training(user_config)\n\n\nif __name__ == \"__main__\":\n main()\n",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
def load_skeleton(mat_path):
mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]
skeleton = OrderedDict()
bone_names = mat_data[1].tolist()
for i, bone in enumerate(bone_names):
bone = bone.strip()
if bone == 'Site':
bone = bone_names[i - 1].strip() + bone
skeleton[bone] = {'offset': [], 'parent': [], 'children': []}
parent_ids = mat_data[2][0]
offsets = mat_data[3]
for i, bone in enumerate(skeleton.keys()):
if bone != 'root':
parent = list(skeleton.keys())[parent_ids[i] - 1]
skeleton[bone]['parent'] = parent
skeleton[parent]['children'].append(bone)
skeleton[bone]['offset'] = offsets[i, :]
return skeleton
<|reserved_special_token_0|>
def write_bvh(skeleton, hierarchy, motion_data_all, out):
for file_name, motion_data in motion_data_all.items():
joint_quarternions = motion_data['joint_quarternions']
root_pos = motion_data['root_position']
frames = []
for i in range(joint_quarternions.shape[0]):
root_pos_i = root_pos[i]
frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())
for j in range(joint_quarternions.shape[1]):
if list(skeleton.keys())[j].endswith('Site'):
continue
R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j,
3], joint_quarternions[i, j, 2], joint_quarternions[i,
j, 1], joint_quarternions[i, j, 0])
euler_ij = rotation_mat_to_euler(R_ij)
frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda
s: s * (180.0 / math.pi), euler_ij.tolist())))
frame += '\r\n'
frames.append(frame)
with open(os.path.join(out, file_name), 'w') as f:
f.writelines(hierarchy)
f.write('MOTION\r\n')
frames[0] = 'Frames: {0}\r\nFrame Time: 0.0083333\r\n'.format(
joint_quarternions.shape[0]) + frames[0]
f.writelines(frames)
print(os.path.join(out, file_name))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('out', type=str)
args = parser.parse_args()
out = args.out
motion_data_all = load_motion(
'../../motiongan/data/style-dataset/style_motion_database.mat', out)
skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')
hierarchy = construct_hierarchy(skeleton)
write_bvh(skeleton, hierarchy, motion_data_all, out)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def load_motion(mat_path, out):
mat_data = scipy.io.loadmat(mat_path)['motion_database']
file_nums = mat_data.shape[1]
motion_data_all = {}
for f_id in range(file_nums):
motion_data = {}
motion_data['style'] = mat_data[0, f_id][0][0]
motion_data['motion_type'] = mat_data[0, f_id][1][0]
full_path = mat_data[0, f_id][2][0, 0][0][0]
file_name = full_path.split('\\')[-1]
frame_nums = mat_data[0, f_id][2].shape[1]
root_pos = np.zeros((frame_nums, 3))
joint_nums = mat_data[0, f_id][2][0, 0][2].shape[0]
motion_data['joint_nums'] = joint_nums
joint_quarternions = np.zeros((frame_nums, joint_nums, 4))
for i in range(frame_nums):
root_pos[i, :] = mat_data[0, f_id][2][0, i][1]
joint_quarternions[i, :, :] = mat_data[0, f_id][2][0, i][2]
motion_data['root_position'] = root_pos
motion_data['joint_quarternions'] = joint_quarternions
motion_data['foot_contact'] = mat_data[0, f_id][3][0]
with open(os.path.join(out, os.path.splitext(file_name)[0] + '.pkl'
), 'wb') as f:
pickle.dump(motion_data, f)
motion_data_all[file_name] = motion_data
return motion_data_all
def load_skeleton(mat_path):
mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]
skeleton = OrderedDict()
bone_names = mat_data[1].tolist()
for i, bone in enumerate(bone_names):
bone = bone.strip()
if bone == 'Site':
bone = bone_names[i - 1].strip() + bone
skeleton[bone] = {'offset': [], 'parent': [], 'children': []}
parent_ids = mat_data[2][0]
offsets = mat_data[3]
for i, bone in enumerate(skeleton.keys()):
if bone != 'root':
parent = list(skeleton.keys())[parent_ids[i] - 1]
skeleton[bone]['parent'] = parent
skeleton[parent]['children'].append(bone)
skeleton[bone]['offset'] = offsets[i, :]
return skeleton
def construct_hierarchy(skeleton):
hierarchy = ['HIERARCHY\r\n']
level = 0
for i, bone in enumerate(skeleton.keys()):
if bone == 'root':
skeleton[bone]['level'] = 0
else:
parent = skeleton[bone]['parent']
skeleton[bone]['level'] = skeleton[parent]['level'] + 1
for i, bone in enumerate(skeleton.keys()):
offset = skeleton[bone]['offset']
if bone == 'root':
hierarchy.append('ROOT root\r\n')
hierarchy.append('{\r\n')
hierarchy.append('\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.
format(offset[0], offset[1], offset[2]))
hierarchy.append(
'\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\r\n'
)
elif bone.endswith('Site'):
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t' * level
hierarchy.append(tabs + 'End Site\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs +
'\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],
offset[1], offset[2]))
hierarchy.append(tabs + '}\r\n')
if i == len(skeleton.keys()) - 1:
while level > 0:
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
for _ in range(level - skeleton[list(skeleton.keys())[i + 1
]]['level']):
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t' * level
hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs +
'\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],
offset[1], offset[2]))
hierarchy.append(tabs +
'\tCHANNELS 3 Zrotation Yrotation Xrotation\r\n')
return hierarchy
def write_bvh(skeleton, hierarchy, motion_data_all, out):
for file_name, motion_data in motion_data_all.items():
joint_quarternions = motion_data['joint_quarternions']
root_pos = motion_data['root_position']
frames = []
for i in range(joint_quarternions.shape[0]):
root_pos_i = root_pos[i]
frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())
for j in range(joint_quarternions.shape[1]):
if list(skeleton.keys())[j].endswith('Site'):
continue
R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j,
3], joint_quarternions[i, j, 2], joint_quarternions[i,
j, 1], joint_quarternions[i, j, 0])
euler_ij = rotation_mat_to_euler(R_ij)
frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda
s: s * (180.0 / math.pi), euler_ij.tolist())))
frame += '\r\n'
frames.append(frame)
with open(os.path.join(out, file_name), 'w') as f:
f.writelines(hierarchy)
f.write('MOTION\r\n')
frames[0] = 'Frames: {0}\r\nFrame Time: 0.0083333\r\n'.format(
joint_quarternions.shape[0]) + frames[0]
f.writelines(frames)
print(os.path.join(out, file_name))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('out', type=str)
args = parser.parse_args()
out = args.out
motion_data_all = load_motion(
'../../motiongan/data/style-dataset/style_motion_database.mat', out)
skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')
hierarchy = construct_hierarchy(skeleton)
write_bvh(skeleton, hierarchy, motion_data_all, out)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
<|reserved_special_token_0|>
def load_motion(mat_path, out):
mat_data = scipy.io.loadmat(mat_path)['motion_database']
file_nums = mat_data.shape[1]
motion_data_all = {}
for f_id in range(file_nums):
motion_data = {}
motion_data['style'] = mat_data[0, f_id][0][0]
motion_data['motion_type'] = mat_data[0, f_id][1][0]
full_path = mat_data[0, f_id][2][0, 0][0][0]
file_name = full_path.split('\\')[-1]
frame_nums = mat_data[0, f_id][2].shape[1]
root_pos = np.zeros((frame_nums, 3))
joint_nums = mat_data[0, f_id][2][0, 0][2].shape[0]
motion_data['joint_nums'] = joint_nums
joint_quarternions = np.zeros((frame_nums, joint_nums, 4))
for i in range(frame_nums):
root_pos[i, :] = mat_data[0, f_id][2][0, i][1]
joint_quarternions[i, :, :] = mat_data[0, f_id][2][0, i][2]
motion_data['root_position'] = root_pos
motion_data['joint_quarternions'] = joint_quarternions
motion_data['foot_contact'] = mat_data[0, f_id][3][0]
with open(os.path.join(out, os.path.splitext(file_name)[0] + '.pkl'
), 'wb') as f:
pickle.dump(motion_data, f)
motion_data_all[file_name] = motion_data
return motion_data_all
def load_skeleton(mat_path):
mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]
skeleton = OrderedDict()
bone_names = mat_data[1].tolist()
for i, bone in enumerate(bone_names):
bone = bone.strip()
if bone == 'Site':
bone = bone_names[i - 1].strip() + bone
skeleton[bone] = {'offset': [], 'parent': [], 'children': []}
parent_ids = mat_data[2][0]
offsets = mat_data[3]
for i, bone in enumerate(skeleton.keys()):
if bone != 'root':
parent = list(skeleton.keys())[parent_ids[i] - 1]
skeleton[bone]['parent'] = parent
skeleton[parent]['children'].append(bone)
skeleton[bone]['offset'] = offsets[i, :]
return skeleton
def construct_hierarchy(skeleton):
hierarchy = ['HIERARCHY\r\n']
level = 0
for i, bone in enumerate(skeleton.keys()):
if bone == 'root':
skeleton[bone]['level'] = 0
else:
parent = skeleton[bone]['parent']
skeleton[bone]['level'] = skeleton[parent]['level'] + 1
for i, bone in enumerate(skeleton.keys()):
offset = skeleton[bone]['offset']
if bone == 'root':
hierarchy.append('ROOT root\r\n')
hierarchy.append('{\r\n')
hierarchy.append('\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.
format(offset[0], offset[1], offset[2]))
hierarchy.append(
'\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\r\n'
)
elif bone.endswith('Site'):
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t' * level
hierarchy.append(tabs + 'End Site\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs +
'\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],
offset[1], offset[2]))
hierarchy.append(tabs + '}\r\n')
if i == len(skeleton.keys()) - 1:
while level > 0:
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
for _ in range(level - skeleton[list(skeleton.keys())[i + 1
]]['level']):
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t' * level
hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs +
'\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],
offset[1], offset[2]))
hierarchy.append(tabs +
'\tCHANNELS 3 Zrotation Yrotation Xrotation\r\n')
return hierarchy
def write_bvh(skeleton, hierarchy, motion_data_all, out):
for file_name, motion_data in motion_data_all.items():
joint_quarternions = motion_data['joint_quarternions']
root_pos = motion_data['root_position']
frames = []
for i in range(joint_quarternions.shape[0]):
root_pos_i = root_pos[i]
frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())
for j in range(joint_quarternions.shape[1]):
if list(skeleton.keys())[j].endswith('Site'):
continue
R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j,
3], joint_quarternions[i, j, 2], joint_quarternions[i,
j, 1], joint_quarternions[i, j, 0])
euler_ij = rotation_mat_to_euler(R_ij)
frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda
s: s * (180.0 / math.pi), euler_ij.tolist())))
frame += '\r\n'
frames.append(frame)
with open(os.path.join(out, file_name), 'w') as f:
f.writelines(hierarchy)
f.write('MOTION\r\n')
frames[0] = 'Frames: {0}\r\nFrame Time: 0.0083333\r\n'.format(
joint_quarternions.shape[0]) + frames[0]
f.writelines(frames)
print(os.path.join(out, file_name))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('out', type=str)
args = parser.parse_args()
out = args.out
motion_data_all = load_motion(
'../../motiongan/data/style-dataset/style_motion_database.mat', out)
skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')
hierarchy = construct_hierarchy(skeleton)
write_bvh(skeleton, hierarchy, motion_data_all, out)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import argparse
import math
import numpy as np
from collections import OrderedDict
import scipy.io
import pickle
from core.utils.euler_to_quaternion import quaternion_to_rotation_mat, rotation_mat_to_euler
def load_motion(mat_path, out):
mat_data = scipy.io.loadmat(mat_path)['motion_database']
file_nums = mat_data.shape[1]
motion_data_all = {}
for f_id in range(file_nums):
motion_data = {}
motion_data['style'] = mat_data[0, f_id][0][0]
motion_data['motion_type'] = mat_data[0, f_id][1][0]
full_path = mat_data[0, f_id][2][0, 0][0][0]
file_name = full_path.split('\\')[-1]
frame_nums = mat_data[0, f_id][2].shape[1]
root_pos = np.zeros((frame_nums, 3))
joint_nums = mat_data[0, f_id][2][0, 0][2].shape[0]
motion_data['joint_nums'] = joint_nums
joint_quarternions = np.zeros((frame_nums, joint_nums, 4))
for i in range(frame_nums):
root_pos[i, :] = mat_data[0, f_id][2][0, i][1]
joint_quarternions[i, :, :] = mat_data[0, f_id][2][0, i][2]
motion_data['root_position'] = root_pos
motion_data['joint_quarternions'] = joint_quarternions
motion_data['foot_contact'] = mat_data[0, f_id][3][0]
with open(os.path.join(out, os.path.splitext(file_name)[0] + '.pkl'
), 'wb') as f:
pickle.dump(motion_data, f)
motion_data_all[file_name] = motion_data
return motion_data_all
def load_skeleton(mat_path):
mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]
skeleton = OrderedDict()
bone_names = mat_data[1].tolist()
for i, bone in enumerate(bone_names):
bone = bone.strip()
if bone == 'Site':
bone = bone_names[i - 1].strip() + bone
skeleton[bone] = {'offset': [], 'parent': [], 'children': []}
parent_ids = mat_data[2][0]
offsets = mat_data[3]
for i, bone in enumerate(skeleton.keys()):
if bone != 'root':
parent = list(skeleton.keys())[parent_ids[i] - 1]
skeleton[bone]['parent'] = parent
skeleton[parent]['children'].append(bone)
skeleton[bone]['offset'] = offsets[i, :]
return skeleton
def construct_hierarchy(skeleton):
hierarchy = ['HIERARCHY\r\n']
level = 0
for i, bone in enumerate(skeleton.keys()):
if bone == 'root':
skeleton[bone]['level'] = 0
else:
parent = skeleton[bone]['parent']
skeleton[bone]['level'] = skeleton[parent]['level'] + 1
for i, bone in enumerate(skeleton.keys()):
offset = skeleton[bone]['offset']
if bone == 'root':
hierarchy.append('ROOT root\r\n')
hierarchy.append('{\r\n')
hierarchy.append('\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.
format(offset[0], offset[1], offset[2]))
hierarchy.append(
'\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\r\n'
)
elif bone.endswith('Site'):
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t' * level
hierarchy.append(tabs + 'End Site\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs +
'\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],
offset[1], offset[2]))
hierarchy.append(tabs + '}\r\n')
if i == len(skeleton.keys()) - 1:
while level > 0:
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
for _ in range(level - skeleton[list(skeleton.keys())[i + 1
]]['level']):
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t' * level
hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs +
'\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],
offset[1], offset[2]))
hierarchy.append(tabs +
'\tCHANNELS 3 Zrotation Yrotation Xrotation\r\n')
return hierarchy
def write_bvh(skeleton, hierarchy, motion_data_all, out):
for file_name, motion_data in motion_data_all.items():
joint_quarternions = motion_data['joint_quarternions']
root_pos = motion_data['root_position']
frames = []
for i in range(joint_quarternions.shape[0]):
root_pos_i = root_pos[i]
frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())
for j in range(joint_quarternions.shape[1]):
if list(skeleton.keys())[j].endswith('Site'):
continue
R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j,
3], joint_quarternions[i, j, 2], joint_quarternions[i,
j, 1], joint_quarternions[i, j, 0])
euler_ij = rotation_mat_to_euler(R_ij)
frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda
s: s * (180.0 / math.pi), euler_ij.tolist())))
frame += '\r\n'
frames.append(frame)
with open(os.path.join(out, file_name), 'w') as f:
f.writelines(hierarchy)
f.write('MOTION\r\n')
frames[0] = 'Frames: {0}\r\nFrame Time: 0.0083333\r\n'.format(
joint_quarternions.shape[0]) + frames[0]
f.writelines(frames)
print(os.path.join(out, file_name))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('out', type=str)
args = parser.parse_args()
out = args.out
motion_data_all = load_motion(
'../../motiongan/data/style-dataset/style_motion_database.mat', out)
skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')
hierarchy = construct_hierarchy(skeleton)
write_bvh(skeleton, hierarchy, motion_data_all, out)
if __name__ == '__main__':
main()
<|reserved_special_token_1|>
### Script to convert the MATLAB structure file ('/motiongan/data/style-dataset/style_motion_database.mat') to .bvh
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import argparse
import math
import numpy as np
from collections import OrderedDict
import scipy.io
import pickle
from core.utils.euler_to_quaternion import quaternion_to_rotation_mat, rotation_mat_to_euler
## Load motion data from .mat file
def load_motion(mat_path, out):
mat_data = scipy.io.loadmat(mat_path)['motion_database']
file_nums = mat_data.shape[1]
motion_data_all = {}
for f_id in range(file_nums):
motion_data = {}
# Get style and motion content
motion_data['style'] = mat_data[0,f_id][0][0]
motion_data['motion_type'] = mat_data[0,f_id][1][0]
# Get file name
full_path = mat_data[0,f_id][2][0,0][0][0]
file_name = full_path.split('\\')[-1]
# Get joint parameters
frame_nums = mat_data[0,f_id][2].shape[1]
root_pos = np.zeros((frame_nums,3))
joint_nums = mat_data[0,f_id][2][0,0][2].shape[0]
motion_data['joint_nums'] = joint_nums
joint_quarternions = np.zeros((frame_nums, joint_nums, 4))
for i in range(frame_nums):
root_pos[i,:] = mat_data[0,f_id][2][0,i][1]
joint_quarternions[i,:,:] = mat_data[0,f_id][2][0,i][2]
motion_data['root_position'] = root_pos
motion_data['joint_quarternions'] = joint_quarternions
# Get foot contact annotation
motion_data['foot_contact'] = mat_data[0,f_id][3][0]
# Save file as pickle
with open(os.path.join(out, os.path.splitext(file_name)[0]+'.pkl'), 'wb') as f:
pickle.dump(motion_data, f)
motion_data_all[file_name] = motion_data
return motion_data_all
## Load skeleton data from .mat file
def load_skeleton(mat_path):
mat_data = scipy.io.loadmat(mat_path)['skel'][0,0]
# Init skeleton
skeleton = OrderedDict()
bone_names = mat_data[1].tolist()
for i, bone in enumerate(bone_names):
bone = bone.strip()
if bone == 'Site':
bone = bone_names[i-1].strip() + bone
skeleton[bone] = {'offset':[], 'parent':[], 'children':[]}
    # Register bone parent and children, offset
parent_ids = mat_data[2][0]
offsets = mat_data[3]
for i, bone in enumerate(skeleton.keys()):
if bone != 'root':
parent = list(skeleton.keys())[parent_ids[i]-1]
skeleton[bone]['parent'] = parent
skeleton[parent]['children'].append(bone)
skeleton[bone]['offset'] = offsets[i,:]
return skeleton
## Construct hierarchy of skeleton for bvh
def construct_hierarchy(skeleton):
hierarchy = ['HIERARCHY\r\n']
# Calc tree level
level = 0
for i, bone in enumerate(skeleton.keys()):
if bone == 'root':
skeleton[bone]['level'] = 0
else:
parent = skeleton[bone]['parent']
skeleton[bone]['level'] = skeleton[parent]['level'] + 1
# Write hierarchy
for i, bone in enumerate(skeleton.keys()):
offset = skeleton[bone]['offset']
if bone == 'root':
hierarchy.append('ROOT root\r\n')
hierarchy.append('{\r\n')
hierarchy.append('\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],offset[1],offset[2]))
hierarchy.append('\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\r\n')
elif bone.endswith('Site'):
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t' * level
hierarchy.append(tabs + 'End Site\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs + '\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],offset[1],offset[2]))
hierarchy.append(tabs + '}\r\n')
            # Put end bracket
if i == len(skeleton.keys())-1:
while level > 0:
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
for _ in range(level - skeleton[list(skeleton.keys())[i+1]]['level']):
level -= 1
hierarchy.append('\t' * level + '}\r\n')
else:
parent = skeleton[bone]['parent']
level = skeleton[bone]['level']
tabs = '\t'*level
hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\r\n')
hierarchy.append(tabs + '{\r\n')
hierarchy.append(tabs + '\tOFFSET {0:.05f} {1:.05f} {2:.05f}\r\n'.format(offset[0],offset[1],offset[2]))
hierarchy.append(tabs + '\tCHANNELS 3 Zrotation Yrotation Xrotation\r\n')
#with open('hierarchy_test.txt', 'w') as f:
# f.writelines(hierarchy)
return hierarchy
# Write .bvh file
def write_bvh(skeleton, hierarchy, motion_data_all, out):
for file_name, motion_data in motion_data_all.items():
joint_quarternions = motion_data['joint_quarternions']
root_pos = motion_data['root_position']
# Convert data to list of string
frames = []
for i in range(joint_quarternions.shape[0]):
# Root pos
root_pos_i = root_pos[i]
frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())
for j in range(joint_quarternions.shape[1]):
# If Endsite, skip
if list(skeleton.keys())[j].endswith('Site'):
continue
                ## This implementation is modified for quaternions stored in 'xyzw' order
R_ij = quaternion_to_rotation_mat(joint_quarternions[i,j,3], joint_quarternions[i,j,2], joint_quarternions[i,j,1], joint_quarternions[i,j,0])
euler_ij = rotation_mat_to_euler(R_ij)
frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda s: s * (180.0/math.pi), euler_ij.tolist())))
frame += '\r\n'
frames.append(frame)
# Write
with open(os.path.join(out, file_name), 'w') as f:
f.writelines(hierarchy)
f.write('MOTION\r\n')
frames[0] = 'Frames: {0}\r\nFrame Time: 0.0083333\r\n'.format(joint_quarternions.shape[0]) + frames[0]
f.writelines(frames)
print(os.path.join(out, file_name))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('out', type=str)
args = parser.parse_args()
out = args.out
motion_data_all = load_motion('../../motiongan/data/style-dataset/style_motion_database.mat', out)
skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')
hierarchy = construct_hierarchy(skeleton)
write_bvh(skeleton, hierarchy, motion_data_all, out)
if __name__ == '__main__':
main()
|
flexible
|
{
"blob_id": "f2dac8b454805829cf5dbe2efe3c0de805ae4cb5",
"index": 1727,
"step-1": "<mask token>\n\n\ndef load_skeleton(mat_path):\n mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]\n skeleton = OrderedDict()\n bone_names = mat_data[1].tolist()\n for i, bone in enumerate(bone_names):\n bone = bone.strip()\n if bone == 'Site':\n bone = bone_names[i - 1].strip() + bone\n skeleton[bone] = {'offset': [], 'parent': [], 'children': []}\n parent_ids = mat_data[2][0]\n offsets = mat_data[3]\n for i, bone in enumerate(skeleton.keys()):\n if bone != 'root':\n parent = list(skeleton.keys())[parent_ids[i] - 1]\n skeleton[bone]['parent'] = parent\n skeleton[parent]['children'].append(bone)\n skeleton[bone]['offset'] = offsets[i, :]\n return skeleton\n\n\n<mask token>\n\n\ndef write_bvh(skeleton, hierarchy, motion_data_all, out):\n for file_name, motion_data in motion_data_all.items():\n joint_quarternions = motion_data['joint_quarternions']\n root_pos = motion_data['root_position']\n frames = []\n for i in range(joint_quarternions.shape[0]):\n root_pos_i = root_pos[i]\n frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())\n for j in range(joint_quarternions.shape[1]):\n if list(skeleton.keys())[j].endswith('Site'):\n continue\n R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j, \n 3], joint_quarternions[i, j, 2], joint_quarternions[i,\n j, 1], joint_quarternions[i, j, 0])\n euler_ij = rotation_mat_to_euler(R_ij)\n frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda\n s: s * (180.0 / math.pi), euler_ij.tolist())))\n frame += '\\r\\n'\n frames.append(frame)\n with open(os.path.join(out, file_name), 'w') as f:\n f.writelines(hierarchy)\n f.write('MOTION\\r\\n')\n frames[0] = 'Frames: {0}\\r\\nFrame Time: 0.0083333\\r\\n'.format(\n joint_quarternions.shape[0]) + frames[0]\n f.writelines(frames)\n print(os.path.join(out, file_name))\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('out', type=str)\n args = parser.parse_args()\n out = args.out\n motion_data_all = load_motion(\n '../../motiongan/data/style-dataset/style_motion_database.mat', out)\n skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')\n hierarchy = construct_hierarchy(skeleton)\n write_bvh(skeleton, hierarchy, motion_data_all, out)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_motion(mat_path, out):\n mat_data = scipy.io.loadmat(mat_path)['motion_database']\n file_nums = mat_data.shape[1]\n motion_data_all = {}\n for f_id in range(file_nums):\n motion_data = {}\n motion_data['style'] = mat_data[0, f_id][0][0]\n motion_data['motion_type'] = mat_data[0, f_id][1][0]\n full_path = mat_data[0, f_id][2][0, 0][0][0]\n file_name = full_path.split('\\\\')[-1]\n frame_nums = mat_data[0, f_id][2].shape[1]\n root_pos = np.zeros((frame_nums, 3))\n joint_nums = mat_data[0, f_id][2][0, 0][2].shape[0]\n motion_data['joint_nums'] = joint_nums\n joint_quarternions = np.zeros((frame_nums, joint_nums, 4))\n for i in range(frame_nums):\n root_pos[i, :] = mat_data[0, f_id][2][0, i][1]\n joint_quarternions[i, :, :] = mat_data[0, f_id][2][0, i][2]\n motion_data['root_position'] = root_pos\n motion_data['joint_quarternions'] = joint_quarternions\n motion_data['foot_contact'] = mat_data[0, f_id][3][0]\n with open(os.path.join(out, os.path.splitext(file_name)[0] + '.pkl'\n ), 'wb') as f:\n pickle.dump(motion_data, f)\n motion_data_all[file_name] = motion_data\n return motion_data_all\n\n\ndef load_skeleton(mat_path):\n mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]\n skeleton = OrderedDict()\n bone_names = mat_data[1].tolist()\n for i, bone in enumerate(bone_names):\n bone = bone.strip()\n if bone == 'Site':\n bone = bone_names[i - 1].strip() + bone\n skeleton[bone] = {'offset': [], 'parent': [], 'children': []}\n parent_ids = mat_data[2][0]\n offsets = mat_data[3]\n for i, bone in enumerate(skeleton.keys()):\n if bone != 'root':\n parent = list(skeleton.keys())[parent_ids[i] - 1]\n skeleton[bone]['parent'] = parent\n skeleton[parent]['children'].append(bone)\n skeleton[bone]['offset'] = offsets[i, :]\n return skeleton\n\n\ndef construct_hierarchy(skeleton):\n hierarchy = ['HIERARCHY\\r\\n']\n level = 0\n for i, bone in enumerate(skeleton.keys()):\n if bone == 'root':\n skeleton[bone]['level'] = 0\n else:\n parent = skeleton[bone]['parent']\n skeleton[bone]['level'] = skeleton[parent]['level'] + 1\n for i, bone in enumerate(skeleton.keys()):\n offset = skeleton[bone]['offset']\n if bone == 'root':\n hierarchy.append('ROOT root\\r\\n')\n hierarchy.append('{\\r\\n')\n hierarchy.append('\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.\n format(offset[0], offset[1], offset[2]))\n hierarchy.append(\n '\\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\\r\\n'\n )\n elif bone.endswith('Site'):\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t' * level\n hierarchy.append(tabs + 'End Site\\r\\n')\n hierarchy.append(tabs + '{\\r\\n')\n hierarchy.append(tabs +\n '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],\n offset[1], offset[2]))\n hierarchy.append(tabs + '}\\r\\n')\n if i == len(skeleton.keys()) - 1:\n while level > 0:\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n else:\n for _ in range(level - skeleton[list(skeleton.keys())[i + 1\n ]]['level']):\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n else:\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t' * level\n hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\\r\\n')\n hierarchy.append(tabs + '{\\r\\n')\n hierarchy.append(tabs +\n '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],\n offset[1], offset[2]))\n hierarchy.append(tabs +\n '\\tCHANNELS 3 Zrotation Yrotation Xrotation\\r\\n')\n return hierarchy\n\n\ndef write_bvh(skeleton, hierarchy, motion_data_all, 
out):\n for file_name, motion_data in motion_data_all.items():\n joint_quarternions = motion_data['joint_quarternions']\n root_pos = motion_data['root_position']\n frames = []\n for i in range(joint_quarternions.shape[0]):\n root_pos_i = root_pos[i]\n frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())\n for j in range(joint_quarternions.shape[1]):\n if list(skeleton.keys())[j].endswith('Site'):\n continue\n R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j, \n 3], joint_quarternions[i, j, 2], joint_quarternions[i,\n j, 1], joint_quarternions[i, j, 0])\n euler_ij = rotation_mat_to_euler(R_ij)\n frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda\n s: s * (180.0 / math.pi), euler_ij.tolist())))\n frame += '\\r\\n'\n frames.append(frame)\n with open(os.path.join(out, file_name), 'w') as f:\n f.writelines(hierarchy)\n f.write('MOTION\\r\\n')\n frames[0] = 'Frames: {0}\\r\\nFrame Time: 0.0083333\\r\\n'.format(\n joint_quarternions.shape[0]) + frames[0]\n f.writelines(frames)\n print(os.path.join(out, file_name))\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('out', type=str)\n args = parser.parse_args()\n out = args.out\n motion_data_all = load_motion(\n '../../motiongan/data/style-dataset/style_motion_database.mat', out)\n skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')\n hierarchy = construct_hierarchy(skeleton)\n write_bvh(skeleton, hierarchy, motion_data_all, out)\n\n\n<mask token>\n",
"step-3": "<mask token>\nsys.path.append(os.path.join(os.path.dirname(__file__), '..'))\n<mask token>\n\n\ndef load_motion(mat_path, out):\n mat_data = scipy.io.loadmat(mat_path)['motion_database']\n file_nums = mat_data.shape[1]\n motion_data_all = {}\n for f_id in range(file_nums):\n motion_data = {}\n motion_data['style'] = mat_data[0, f_id][0][0]\n motion_data['motion_type'] = mat_data[0, f_id][1][0]\n full_path = mat_data[0, f_id][2][0, 0][0][0]\n file_name = full_path.split('\\\\')[-1]\n frame_nums = mat_data[0, f_id][2].shape[1]\n root_pos = np.zeros((frame_nums, 3))\n joint_nums = mat_data[0, f_id][2][0, 0][2].shape[0]\n motion_data['joint_nums'] = joint_nums\n joint_quarternions = np.zeros((frame_nums, joint_nums, 4))\n for i in range(frame_nums):\n root_pos[i, :] = mat_data[0, f_id][2][0, i][1]\n joint_quarternions[i, :, :] = mat_data[0, f_id][2][0, i][2]\n motion_data['root_position'] = root_pos\n motion_data['joint_quarternions'] = joint_quarternions\n motion_data['foot_contact'] = mat_data[0, f_id][3][0]\n with open(os.path.join(out, os.path.splitext(file_name)[0] + '.pkl'\n ), 'wb') as f:\n pickle.dump(motion_data, f)\n motion_data_all[file_name] = motion_data\n return motion_data_all\n\n\ndef load_skeleton(mat_path):\n mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]\n skeleton = OrderedDict()\n bone_names = mat_data[1].tolist()\n for i, bone in enumerate(bone_names):\n bone = bone.strip()\n if bone == 'Site':\n bone = bone_names[i - 1].strip() + bone\n skeleton[bone] = {'offset': [], 'parent': [], 'children': []}\n parent_ids = mat_data[2][0]\n offsets = mat_data[3]\n for i, bone in enumerate(skeleton.keys()):\n if bone != 'root':\n parent = list(skeleton.keys())[parent_ids[i] - 1]\n skeleton[bone]['parent'] = parent\n skeleton[parent]['children'].append(bone)\n skeleton[bone]['offset'] = offsets[i, :]\n return skeleton\n\n\ndef construct_hierarchy(skeleton):\n hierarchy = ['HIERARCHY\\r\\n']\n level = 0\n for i, bone in enumerate(skeleton.keys()):\n if bone == 'root':\n skeleton[bone]['level'] = 0\n else:\n parent = skeleton[bone]['parent']\n skeleton[bone]['level'] = skeleton[parent]['level'] + 1\n for i, bone in enumerate(skeleton.keys()):\n offset = skeleton[bone]['offset']\n if bone == 'root':\n hierarchy.append('ROOT root\\r\\n')\n hierarchy.append('{\\r\\n')\n hierarchy.append('\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.\n format(offset[0], offset[1], offset[2]))\n hierarchy.append(\n '\\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\\r\\n'\n )\n elif bone.endswith('Site'):\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t' * level\n hierarchy.append(tabs + 'End Site\\r\\n')\n hierarchy.append(tabs + '{\\r\\n')\n hierarchy.append(tabs +\n '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],\n offset[1], offset[2]))\n hierarchy.append(tabs + '}\\r\\n')\n if i == len(skeleton.keys()) - 1:\n while level > 0:\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n else:\n for _ in range(level - skeleton[list(skeleton.keys())[i + 1\n ]]['level']):\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n else:\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t' * level\n hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\\r\\n')\n hierarchy.append(tabs + '{\\r\\n')\n hierarchy.append(tabs +\n '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],\n offset[1], offset[2]))\n hierarchy.append(tabs +\n '\\tCHANNELS 3 Zrotation Yrotation 
Xrotation\\r\\n')\n return hierarchy\n\n\ndef write_bvh(skeleton, hierarchy, motion_data_all, out):\n for file_name, motion_data in motion_data_all.items():\n joint_quarternions = motion_data['joint_quarternions']\n root_pos = motion_data['root_position']\n frames = []\n for i in range(joint_quarternions.shape[0]):\n root_pos_i = root_pos[i]\n frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())\n for j in range(joint_quarternions.shape[1]):\n if list(skeleton.keys())[j].endswith('Site'):\n continue\n R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j, \n 3], joint_quarternions[i, j, 2], joint_quarternions[i,\n j, 1], joint_quarternions[i, j, 0])\n euler_ij = rotation_mat_to_euler(R_ij)\n frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda\n s: s * (180.0 / math.pi), euler_ij.tolist())))\n frame += '\\r\\n'\n frames.append(frame)\n with open(os.path.join(out, file_name), 'w') as f:\n f.writelines(hierarchy)\n f.write('MOTION\\r\\n')\n frames[0] = 'Frames: {0}\\r\\nFrame Time: 0.0083333\\r\\n'.format(\n joint_quarternions.shape[0]) + frames[0]\n f.writelines(frames)\n print(os.path.join(out, file_name))\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('out', type=str)\n args = parser.parse_args()\n out = args.out\n motion_data_all = load_motion(\n '../../motiongan/data/style-dataset/style_motion_database.mat', out)\n skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')\n hierarchy = construct_hierarchy(skeleton)\n write_bvh(skeleton, hierarchy, motion_data_all, out)\n\n\nif __name__ == '__main__':\n main()\n",
"step-4": "import os\nimport sys\nsys.path.append(os.path.join(os.path.dirname(__file__), '..'))\nimport argparse\nimport math\nimport numpy as np\nfrom collections import OrderedDict\nimport scipy.io\nimport pickle\nfrom core.utils.euler_to_quaternion import quaternion_to_rotation_mat, rotation_mat_to_euler\n\n\ndef load_motion(mat_path, out):\n mat_data = scipy.io.loadmat(mat_path)['motion_database']\n file_nums = mat_data.shape[1]\n motion_data_all = {}\n for f_id in range(file_nums):\n motion_data = {}\n motion_data['style'] = mat_data[0, f_id][0][0]\n motion_data['motion_type'] = mat_data[0, f_id][1][0]\n full_path = mat_data[0, f_id][2][0, 0][0][0]\n file_name = full_path.split('\\\\')[-1]\n frame_nums = mat_data[0, f_id][2].shape[1]\n root_pos = np.zeros((frame_nums, 3))\n joint_nums = mat_data[0, f_id][2][0, 0][2].shape[0]\n motion_data['joint_nums'] = joint_nums\n joint_quarternions = np.zeros((frame_nums, joint_nums, 4))\n for i in range(frame_nums):\n root_pos[i, :] = mat_data[0, f_id][2][0, i][1]\n joint_quarternions[i, :, :] = mat_data[0, f_id][2][0, i][2]\n motion_data['root_position'] = root_pos\n motion_data['joint_quarternions'] = joint_quarternions\n motion_data['foot_contact'] = mat_data[0, f_id][3][0]\n with open(os.path.join(out, os.path.splitext(file_name)[0] + '.pkl'\n ), 'wb') as f:\n pickle.dump(motion_data, f)\n motion_data_all[file_name] = motion_data\n return motion_data_all\n\n\ndef load_skeleton(mat_path):\n mat_data = scipy.io.loadmat(mat_path)['skel'][0, 0]\n skeleton = OrderedDict()\n bone_names = mat_data[1].tolist()\n for i, bone in enumerate(bone_names):\n bone = bone.strip()\n if bone == 'Site':\n bone = bone_names[i - 1].strip() + bone\n skeleton[bone] = {'offset': [], 'parent': [], 'children': []}\n parent_ids = mat_data[2][0]\n offsets = mat_data[3]\n for i, bone in enumerate(skeleton.keys()):\n if bone != 'root':\n parent = list(skeleton.keys())[parent_ids[i] - 1]\n skeleton[bone]['parent'] = parent\n skeleton[parent]['children'].append(bone)\n skeleton[bone]['offset'] = offsets[i, :]\n return skeleton\n\n\ndef construct_hierarchy(skeleton):\n hierarchy = ['HIERARCHY\\r\\n']\n level = 0\n for i, bone in enumerate(skeleton.keys()):\n if bone == 'root':\n skeleton[bone]['level'] = 0\n else:\n parent = skeleton[bone]['parent']\n skeleton[bone]['level'] = skeleton[parent]['level'] + 1\n for i, bone in enumerate(skeleton.keys()):\n offset = skeleton[bone]['offset']\n if bone == 'root':\n hierarchy.append('ROOT root\\r\\n')\n hierarchy.append('{\\r\\n')\n hierarchy.append('\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.\n format(offset[0], offset[1], offset[2]))\n hierarchy.append(\n '\\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\\r\\n'\n )\n elif bone.endswith('Site'):\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t' * level\n hierarchy.append(tabs + 'End Site\\r\\n')\n hierarchy.append(tabs + '{\\r\\n')\n hierarchy.append(tabs +\n '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],\n offset[1], offset[2]))\n hierarchy.append(tabs + '}\\r\\n')\n if i == len(skeleton.keys()) - 1:\n while level > 0:\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n else:\n for _ in range(level - skeleton[list(skeleton.keys())[i + 1\n ]]['level']):\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n else:\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t' * level\n hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\\r\\n')\n hierarchy.append(tabs + 
'{\\r\\n')\n hierarchy.append(tabs +\n '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],\n offset[1], offset[2]))\n hierarchy.append(tabs +\n '\\tCHANNELS 3 Zrotation Yrotation Xrotation\\r\\n')\n return hierarchy\n\n\ndef write_bvh(skeleton, hierarchy, motion_data_all, out):\n for file_name, motion_data in motion_data_all.items():\n joint_quarternions = motion_data['joint_quarternions']\n root_pos = motion_data['root_position']\n frames = []\n for i in range(joint_quarternions.shape[0]):\n root_pos_i = root_pos[i]\n frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist())\n for j in range(joint_quarternions.shape[1]):\n if list(skeleton.keys())[j].endswith('Site'):\n continue\n R_ij = quaternion_to_rotation_mat(joint_quarternions[i, j, \n 3], joint_quarternions[i, j, 2], joint_quarternions[i,\n j, 1], joint_quarternions[i, j, 0])\n euler_ij = rotation_mat_to_euler(R_ij)\n frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda\n s: s * (180.0 / math.pi), euler_ij.tolist())))\n frame += '\\r\\n'\n frames.append(frame)\n with open(os.path.join(out, file_name), 'w') as f:\n f.writelines(hierarchy)\n f.write('MOTION\\r\\n')\n frames[0] = 'Frames: {0}\\r\\nFrame Time: 0.0083333\\r\\n'.format(\n joint_quarternions.shape[0]) + frames[0]\n f.writelines(frames)\n print(os.path.join(out, file_name))\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('out', type=str)\n args = parser.parse_args()\n out = args.out\n motion_data_all = load_motion(\n '../../motiongan/data/style-dataset/style_motion_database.mat', out)\n skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')\n hierarchy = construct_hierarchy(skeleton)\n write_bvh(skeleton, hierarchy, motion_data_all, out)\n\n\nif __name__ == '__main__':\n main()\n",
"step-5": "### Script to convert matlab structure file (/motiongan/data/style-dataset/style_motion_database.mat')\nimport os\nimport sys\nsys.path.append(os.path.join(os.path.dirname(__file__), '..'))\nimport argparse\nimport math\nimport numpy as np\nfrom collections import OrderedDict\nimport scipy.io\nimport pickle\n\nfrom core.utils.euler_to_quaternion import quaternion_to_rotation_mat, rotation_mat_to_euler\n\n## Load motion data from .mat file\ndef load_motion(mat_path, out):\n mat_data = scipy.io.loadmat(mat_path)['motion_database']\n file_nums = mat_data.shape[1]\n motion_data_all = {}\n for f_id in range(file_nums):\n motion_data = {}\n # Get style and motion content \n motion_data['style'] = mat_data[0,f_id][0][0]\n motion_data['motion_type'] = mat_data[0,f_id][1][0] \n\n # Get file name\n full_path = mat_data[0,f_id][2][0,0][0][0]\n file_name = full_path.split('\\\\')[-1]\n\n # Get joint parameters\n frame_nums = mat_data[0,f_id][2].shape[1]\n root_pos = np.zeros((frame_nums,3))\n \n joint_nums = mat_data[0,f_id][2][0,0][2].shape[0]\n motion_data['joint_nums'] = joint_nums\n joint_quarternions = np.zeros((frame_nums, joint_nums, 4))\n for i in range(frame_nums):\n root_pos[i,:] = mat_data[0,f_id][2][0,i][1]\n joint_quarternions[i,:,:] = mat_data[0,f_id][2][0,i][2]\n motion_data['root_position'] = root_pos\n motion_data['joint_quarternions'] = joint_quarternions\n\n # Get foot contact annotation\n motion_data['foot_contact'] = mat_data[0,f_id][3][0]\n\n\n # Save file as pickle\n with open(os.path.join(out, os.path.splitext(file_name)[0]+'.pkl'), 'wb') as f:\n pickle.dump(motion_data, f)\n\n motion_data_all[file_name] = motion_data\n\n return motion_data_all\n\n\n## Load skeleton data from .mat file\ndef load_skeleton(mat_path):\n mat_data = scipy.io.loadmat(mat_path)['skel'][0,0]\n\n # Init skeleton\n skeleton = OrderedDict()\n bone_names = mat_data[1].tolist()\n for i, bone in enumerate(bone_names):\n bone = bone.strip()\n if bone == 'Site':\n bone = bone_names[i-1].strip() + bone\n skeleton[bone] = {'offset':[], 'parent':[], 'children':[]}\n \n # Resister bone parent and children, offset\n parent_ids = mat_data[2][0]\n offsets = mat_data[3]\n for i, bone in enumerate(skeleton.keys()):\n if bone != 'root': \n parent = list(skeleton.keys())[parent_ids[i]-1]\n skeleton[bone]['parent'] = parent\n skeleton[parent]['children'].append(bone)\n\n skeleton[bone]['offset'] = offsets[i,:]\n\n return skeleton\n\n\n## Construct hierarchy of skeleton for bvh\ndef construct_hierarchy(skeleton):\n hierarchy = ['HIERARCHY\\r\\n']\n \n # Calc tree level\n level = 0\n for i, bone in enumerate(skeleton.keys()):\n if bone == 'root':\n skeleton[bone]['level'] = 0\n else:\n parent = skeleton[bone]['parent']\n skeleton[bone]['level'] = skeleton[parent]['level'] + 1\n\n # Write hierarchy\n for i, bone in enumerate(skeleton.keys()):\n offset = skeleton[bone]['offset']\n if bone == 'root':\n hierarchy.append('ROOT root\\r\\n')\n hierarchy.append('{\\r\\n')\n hierarchy.append('\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],offset[1],offset[2]))\n hierarchy.append('\\tCHANNELS 6 Xposition Yposition Zposition Zrotation Yrotation Xrotation\\r\\n')\n\n elif bone.endswith('Site'):\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t' * level\n hierarchy.append(tabs + 'End Site\\r\\n')\n hierarchy.append(tabs + '{\\r\\n')\n hierarchy.append(tabs + '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],offset[1],offset[2]))\n hierarchy.append(tabs + '}\\r\\n')\n 
# Put end brancket\n if i == len(skeleton.keys())-1:\n while level > 0:\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n else: \n for _ in range(level - skeleton[list(skeleton.keys())[i+1]]['level']):\n level -= 1\n hierarchy.append('\\t' * level + '}\\r\\n')\n\n else:\n parent = skeleton[bone]['parent']\n level = skeleton[bone]['level']\n tabs = '\\t'*level\n hierarchy.append(tabs + 'JOINT {0}'.format(bone) + '\\r\\n')\n hierarchy.append(tabs + '{\\r\\n')\n hierarchy.append(tabs + '\\tOFFSET {0:.05f} {1:.05f} {2:.05f}\\r\\n'.format(offset[0],offset[1],offset[2]))\n hierarchy.append(tabs + '\\tCHANNELS 3 Zrotation Yrotation Xrotation\\r\\n')\n \n #with open('hierarchy_test.txt', 'w') as f:\n # f.writelines(hierarchy)\n return hierarchy\n\n\n# Write .bvh file\ndef write_bvh(skeleton, hierarchy, motion_data_all, out):\n for file_name, motion_data in motion_data_all.items():\n joint_quarternions = motion_data['joint_quarternions']\n root_pos = motion_data['root_position']\n\n # Convert data to list of string\n frames = []\n for i in range(joint_quarternions.shape[0]):\n # Root pos\n root_pos_i = root_pos[i]\n frame = '{0:.05f} {1:.05f} {2:.05f} '.format(*root_pos_i.tolist()) \n\n for j in range(joint_quarternions.shape[1]):\n # If Endsite, skip\n if list(skeleton.keys())[j].endswith('Site'): \n continue\n ## This implementation is modified to quarternion with 'xyzw' order\n R_ij = quaternion_to_rotation_mat(joint_quarternions[i,j,3], joint_quarternions[i,j,2], joint_quarternions[i,j,1], joint_quarternions[i,j,0]) \n euler_ij = rotation_mat_to_euler(R_ij)\n frame += '{0:.05f} {1:.05f} {2:.05f} '.format(*list(map(lambda s: s * (180.0/math.pi), euler_ij.tolist())))\n\n frame += '\\r\\n'\n frames.append(frame)\n \n # Write\n with open(os.path.join(out, file_name), 'w') as f:\n f.writelines(hierarchy)\n\n f.write('MOTION\\r\\n')\n frames[0] = 'Frames: {0}\\r\\nFrame Time: 0.0083333\\r\\n'.format(joint_quarternions.shape[0]) + frames[0]\n f.writelines(frames)\n \n print(os.path.join(out, file_name))\n\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('out', type=str)\n\n args = parser.parse_args()\n out = args.out\n\n motion_data_all = load_motion('../../motiongan/data/style-dataset/style_motion_database.mat', out)\n skeleton = load_skeleton('../../motiongan/data/style-dataset/skeleton.mat')\n hierarchy = construct_hierarchy(skeleton)\n write_bvh(skeleton, hierarchy, motion_data_all, out) \n\nif __name__ == '__main__':\n main()\n",
"step-ids": [
3,
5,
6,
7,
8
]
}
|
[
3,
5,
6,
7,
8
] |
import matplotlib.pyplot as plt
import numpy as np
import unittest
from ema_workbench.analysis import clusterer
from test import utilities
class ClusterTestCase(unittest.TestCase):
def test_cluster(self):
n = 10
experiments, outcomes = utilities.load_flu_data()
data = outcomes["infected fraction R1"][0:n, :]
distances = clusterer.calculate_cid(data)
self.assertEqual(distances.shape, (n, n))
clusterer.plot_dendrogram(distances)
plt.draw()
assignment = clusterer.apply_agglomerative_clustering(distances, 2)
self.assertEqual(assignment.shape, (10,))
distances = clusterer.calculate_cid(data, condensed_form=True)
self.assertEqual(distances.shape, sum(np.arange(0, n)))
clusterer.plot_dendrogram(distances)
plt.draw()
plt.close("all")
if __name__ == "__main__":
unittest.main()
|
normal
|
{
"blob_id": "a7e2b016131dfdb75e537e86875e1b2f19fb3d9d",
"index": 2580,
"step-1": "<mask token>\n\n\nclass ClusterTestCase(unittest.TestCase):\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ClusterTestCase(unittest.TestCase):\n\n def test_cluster(self):\n n = 10\n experiments, outcomes = utilities.load_flu_data()\n data = outcomes['infected fraction R1'][0:n, :]\n distances = clusterer.calculate_cid(data)\n self.assertEqual(distances.shape, (n, n))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n assignment = clusterer.apply_agglomerative_clustering(distances, 2)\n self.assertEqual(assignment.shape, (10,))\n distances = clusterer.calculate_cid(data, condensed_form=True)\n self.assertEqual(distances.shape, sum(np.arange(0, n)))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n plt.close('all')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ClusterTestCase(unittest.TestCase):\n\n def test_cluster(self):\n n = 10\n experiments, outcomes = utilities.load_flu_data()\n data = outcomes['infected fraction R1'][0:n, :]\n distances = clusterer.calculate_cid(data)\n self.assertEqual(distances.shape, (n, n))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n assignment = clusterer.apply_agglomerative_clustering(distances, 2)\n self.assertEqual(assignment.shape, (10,))\n distances = clusterer.calculate_cid(data, condensed_form=True)\n self.assertEqual(distances.shape, sum(np.arange(0, n)))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n plt.close('all')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "import matplotlib.pyplot as plt\nimport numpy as np\nimport unittest\nfrom ema_workbench.analysis import clusterer\nfrom test import utilities\n\n\nclass ClusterTestCase(unittest.TestCase):\n\n def test_cluster(self):\n n = 10\n experiments, outcomes = utilities.load_flu_data()\n data = outcomes['infected fraction R1'][0:n, :]\n distances = clusterer.calculate_cid(data)\n self.assertEqual(distances.shape, (n, n))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n assignment = clusterer.apply_agglomerative_clustering(distances, 2)\n self.assertEqual(assignment.shape, (10,))\n distances = clusterer.calculate_cid(data, condensed_form=True)\n self.assertEqual(distances.shape, sum(np.arange(0, n)))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n plt.close('all')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "import matplotlib.pyplot as plt\nimport numpy as np\nimport unittest\n\nfrom ema_workbench.analysis import clusterer\nfrom test import utilities\n\n\nclass ClusterTestCase(unittest.TestCase):\n def test_cluster(self):\n n = 10\n experiments, outcomes = utilities.load_flu_data()\n data = outcomes[\"infected fraction R1\"][0:n, :]\n\n distances = clusterer.calculate_cid(data)\n self.assertEqual(distances.shape, (n, n))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n\n assignment = clusterer.apply_agglomerative_clustering(distances, 2)\n self.assertEqual(assignment.shape, (10,))\n\n distances = clusterer.calculate_cid(data, condensed_form=True)\n self.assertEqual(distances.shape, sum(np.arange(0, n)))\n clusterer.plot_dendrogram(distances)\n plt.draw()\n\n plt.close(\"all\")\n\n\nif __name__ == \"__main__\":\n unittest.main()\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
file.write(response.content)
file.close()
<|reserved_special_token_0|>
bot.start()
run()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
response = requests.get(BG_IMAGE)
file = open('./etc/tg_vc_bot.jpg', 'wb')
file.write(response.content)
file.close()
bot = Bot(':memory:', API_ID, API_HASH, bot_token=BOT_TOKEN, plugins=dict(
root='samantha.modules'))
bot.start()
run()
<|reserved_special_token_1|>
import requests
from pyrogram import Client as Bot
from samantha.config import API_HASH, API_ID, BG_IMAGE, BOT_TOKEN
from samantha.services.callsmusic import run
response = requests.get(BG_IMAGE)
file = open('./etc/tg_vc_bot.jpg', 'wb')
file.write(response.content)
file.close()
bot = Bot(':memory:', API_ID, API_HASH, bot_token=BOT_TOKEN, plugins=dict(
root='samantha.modules'))
bot.start()
run()
<|reserved_special_token_1|>
import requests
from pyrogram import Client as Bot
from samantha.config import API_HASH, API_ID, BG_IMAGE, BOT_TOKEN
from samantha.services.callsmusic import run
response = requests.get(BG_IMAGE)
file = open("./etc/tg_vc_bot.jpg", "wb")
file.write(response.content)
file.close()
bot = Bot(
":memory:",
API_ID,
API_HASH,
bot_token=BOT_TOKEN,
plugins=dict(root="samantha.modules"),
)
bot.start()
run()
|
flexible
|
{
"blob_id": "c5ac37ce09f7cd76ccd9b93c64e602209a04c55c",
"index": 1824,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfile.write(response.content)\nfile.close()\n<mask token>\nbot.start()\nrun()\n",
"step-3": "<mask token>\nresponse = requests.get(BG_IMAGE)\nfile = open('./etc/tg_vc_bot.jpg', 'wb')\nfile.write(response.content)\nfile.close()\nbot = Bot(':memory:', API_ID, API_HASH, bot_token=BOT_TOKEN, plugins=dict(\n root='samantha.modules'))\nbot.start()\nrun()\n",
"step-4": "import requests\nfrom pyrogram import Client as Bot\nfrom samantha.config import API_HASH, API_ID, BG_IMAGE, BOT_TOKEN\nfrom samantha.services.callsmusic import run\nresponse = requests.get(BG_IMAGE)\nfile = open('./etc/tg_vc_bot.jpg', 'wb')\nfile.write(response.content)\nfile.close()\nbot = Bot(':memory:', API_ID, API_HASH, bot_token=BOT_TOKEN, plugins=dict(\n root='samantha.modules'))\nbot.start()\nrun()\n",
"step-5": "import requests\nfrom pyrogram import Client as Bot\n\nfrom samantha.config import API_HASH, API_ID, BG_IMAGE, BOT_TOKEN\nfrom samantha.services.callsmusic import run\n\nresponse = requests.get(BG_IMAGE)\nfile = open(\"./etc/tg_vc_bot.jpg\", \"wb\")\nfile.write(response.content)\nfile.close()\n\nbot = Bot(\n \":memory:\",\n API_ID,\n API_HASH,\n bot_token=BOT_TOKEN,\n plugins=dict(root=\"samantha.modules\"),\n)\n\nbot.start()\nrun()\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
engine = create_engine(config.DB_URI)
Session = scoped_session(sessionmaker(bind=engine))
<|reserved_special_token_1|>
from app import config
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
engine = create_engine(config.DB_URI)
Session = scoped_session(sessionmaker(bind=engine))
|
flexible
|
{
"blob_id": "86c1aee21639958f707f99bc2468e952ad6c1859",
"index": 9352,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nengine = create_engine(config.DB_URI)\nSession = scoped_session(sessionmaker(bind=engine))\n",
"step-3": "from app import config\nfrom sqlalchemy import create_engine\nfrom sqlalchemy.orm import scoped_session, sessionmaker\nengine = create_engine(config.DB_URI)\nSession = scoped_session(sessionmaker(bind=engine))\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
#!/usr/bin/env python
from pymongo import MongoClient
import serial
import sys, os, datetime
os.system('sudo stty -F /dev/ttyS0 1200 sane evenp parenb cs7 -crtscts')
SERIAL = '/dev/ttyS0'
try:
ser = serial.Serial(
port=SERIAL,
baudrate = 1200,
parity=serial.PARITY_EVEN,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.SEVENBITS,
timeout=1)
except:
print "Impossible d'ouvrir le port serie" + SERIAL
print sys.exc_info()
sys.exit(1)
# 2. Read one complete frame
compteur=0
data = {}
#'Periode':'HP','IndexHCreuses': "019728489",'IndexHPleines':'019728489','InstantI1':'027','InstantI2':'027','InstantI3':'027','IMaxi1':'027','IMaxi2':'027','IMaxi3':'028','PuissanceApp':'02695','PuissanceMax':'13160'}
ADCO ='ADCO'
while True :
trame=ser.readline().strip()
listeTrame = trame.split(' ')
if len(listeTrame)>1 :
key, value = listeTrame[0], listeTrame[1]
print key + ":" + value
if key == "ADCO" :
if 'ADCO' not in ADCO : break
ADCO = value
			# the period for me is 'HC' or 'HP'; only the first 2 chars are useful
elif key == "PTEC" : data['Periode'] = value[:2]
elif key == "HCHC" : data['IndexHCreuses'] = int(value)
elif key == "HCHP" : data['IndexHPleines'] = int(value)
elif key == "IINST1" : data['InstantI1'] = int(value)
elif key == "IINST2" : data['InstantI2'] = int(value)
elif key == "IINST3" : data['InstantI3'] = int(value)
elif key == "IMAX1" : data['IMaxi1'] = int(value)
elif key == "IMAX2" : data['IMaxi2'] = int(value)
elif key == "IMAX3" : data['IMaxi3'] = int(value)
elif key == "PAPP" : data['PuissanceApp'] = int(value)
elif key == "PMAX" : data['PuissanceMax'] = int(value)
dateDeMesure = datetime.datetime.utcnow()
data['dateMesure'] = dateDeMesure
clientMongo = MongoClient('mongodb://bber:cab32b79@nounours:27017/')
db = clientMongo.teleinfo
collec = db.conso
print (data)
un_id=collec.insert_one(data).inserted_id
print (un_id)
ser.close()
|
normal
|
{
"blob_id": "d0997f5001090dd8925640cd5b0f3eb2e6768113",
"index": 3862,
"step-1": "#!/usr/bin/env python\n\n\nfrom pymongo import MongoClient\nimport serial\nimport sys, os, datetime\n\nos.system('sudo stty -F /dev/ttyS0 1200 sane evenp parenb cs7 -crtscts')\n\nSERIAL = '/dev/ttyS0'\ntry:\n ser = serial.Serial(\n port=SERIAL,\n baudrate = 1200,\n parity=serial.PARITY_EVEN,\n stopbits=serial.STOPBITS_ONE,\n bytesize=serial.SEVENBITS,\n timeout=1)\nexcept:\n print \"Impossible d'ouvrir le port serie\" + SERIAL\n print sys.exc_info()\n sys.exit(1)\n\n# 2. Lecture d'une trame complete\ncompteur=0\ndata = {}\n#'Periode':'HP','IndexHCreuses': \"019728489\",'IndexHPleines':'019728489','InstantI1':'027','InstantI2':'027','InstantI3':'027','IMaxi1':'027','IMaxi2':'027','IMaxi3':'028','PuissanceApp':'02695','PuissanceMax':'13160'} \nADCO ='ADCO'\nwhile True :\n\ttrame=ser.readline().strip()\n\tlisteTrame = trame.split(' ')\n\tif len(listeTrame)>1 :\n\t\tkey, value = listeTrame[0], listeTrame[1]\n\t\tprint key + \":\" + value\n\t\tif key == \"ADCO\" : \n\t \t\tif 'ADCO' not in ADCO : break\n\t \t\tADCO = value\n\t\t\t# la periode pour moi est 'HC' ou 'HP', seul les 2 1ers char sont utiles\n\t\telif key == \"PTEC\" : data['Periode'] = value[:2]\n\t\telif key == \"HCHC\" : data['IndexHCreuses'] = int(value)\n\t\telif key == \"HCHP\" : data['IndexHPleines'] = int(value)\n\t\telif key == \"IINST1\" : data['InstantI1'] = int(value)\n\t\telif key == \"IINST2\" : data['InstantI2'] = int(value)\n\t\telif key == \"IINST3\" : data['InstantI3'] = int(value)\n\t\telif key == \"IMAX1\" : data['IMaxi1'] = int(value)\n\t\telif key == \"IMAX2\" : data['IMaxi2'] = int(value)\n\t\telif key == \"IMAX3\" : data['IMaxi3'] = int(value)\n\t\telif key == \"PAPP\" : data['PuissanceApp'] = int(value)\n\t\telif key == \"PMAX\" : data['PuissanceMax'] = int(value)\n\ndateDeMesure = datetime.datetime.utcnow()\n\ndata['dateMesure'] = dateDeMesure\n\nclientMongo = MongoClient('mongodb://bber:cab32b79@nounours:27017/')\ndb = clientMongo.teleinfo\ncollec = db.conso\n\nprint (data)\nun_id=collec.insert_one(data).inserted_id\nprint (un_id)\nser.close()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
def chess():
row = 0
line = 0
chess1 = []
chess2 = []
for line in range(3):
x1 = (0,line)
chess1.append(x1)
for line in range(3):
x2 = (1,line)
chess2.append(x2)
print(chess1)
print(chess2)
for x in range(len(chess1))
if chess2[x][1] != chess1[]
chess()
|
normal
|
{
"blob_id": "7d0d1a53a249167edade24a4e9305c95288a8574",
"index": 4851,
"step-1": "def chess():\n row = 0\n line = 0\n chess1 = []\n chess2 = []\n for line in range(3):\n x1 = (0,line)\n chess1.append(x1)\n for line in range(3):\n x2 = (1,line)\n chess2.append(x2)\n print(chess1)\n print(chess2)\n for x in range(len(chess1))\n if chess2[x][1] != chess1[]\n \nchess()\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
def output(i, out):
with open('B-large.out', 'a') as outfile:
outfile.write('Case #{0}: {1}\n'.format(i, out))
def solve(i, stack):
cursymbol = stack[0]
counter = 0 if stack[-1] == '+' else 1
for symbol in stack:
if symbol != cursymbol:
cursymbol = symbol
counter += 1
output(i, counter)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def output(i, out):
with open('B-large.out', 'a') as outfile:
outfile.write('Case #{0}: {1}\n'.format(i, out))
def solve(i, stack):
cursymbol = stack[0]
counter = 0 if stack[-1] == '+' else 1
for symbol in stack:
if symbol != cursymbol:
cursymbol = symbol
counter += 1
output(i, counter)
<|reserved_special_token_0|>
for i, line in enumerate(lines):
if i > 0:
solve(i, line)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def output(i, out):
with open('B-large.out', 'a') as outfile:
outfile.write('Case #{0}: {1}\n'.format(i, out))
def solve(i, stack):
cursymbol = stack[0]
counter = 0 if stack[-1] == '+' else 1
for symbol in stack:
if symbol != cursymbol:
cursymbol = symbol
counter += 1
output(i, counter)
lines = np.loadtxt('B-large.in', dtype=str)
for i, line in enumerate(lines):
if i > 0:
solve(i, line)
<|reserved_special_token_1|>
import numpy as np
def output(i, out):
with open('B-large.out', 'a') as outfile:
outfile.write('Case #{0}: {1}\n'.format(i, out))
def solve(i, stack):
cursymbol = stack[0]
counter = 0 if stack[-1] == '+' else 1
for symbol in stack:
if symbol != cursymbol:
cursymbol = symbol
counter += 1
output(i, counter)
lines = np.loadtxt('B-large.in', dtype=str)
for i, line in enumerate(lines):
if i > 0:
solve(i, line)
<|reserved_special_token_1|>
import numpy as np
def output(i, out):
with open('B-large.out', 'a') as outfile:
outfile.write("Case #{0}: {1}\n".format(i, out))
def solve(i, stack):
cursymbol = stack[0]
counter = 0 if stack[-1] == "+" else 1
for symbol in stack:
if symbol != cursymbol:
cursymbol = symbol
counter += 1
output(i, counter)
lines = np.loadtxt('B-large.in', dtype=str)
for i, line in enumerate(lines):
if i > 0:
solve(i, line)
|
flexible
|
{
"blob_id": "752679d2484b6b91a734c7cbe4a99bd5676661eb",
"index": 9498,
"step-1": "<mask token>\n\n\ndef output(i, out):\n with open('B-large.out', 'a') as outfile:\n outfile.write('Case #{0}: {1}\\n'.format(i, out))\n\n\ndef solve(i, stack):\n cursymbol = stack[0]\n counter = 0 if stack[-1] == '+' else 1\n for symbol in stack:\n if symbol != cursymbol:\n cursymbol = symbol\n counter += 1\n output(i, counter)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef output(i, out):\n with open('B-large.out', 'a') as outfile:\n outfile.write('Case #{0}: {1}\\n'.format(i, out))\n\n\ndef solve(i, stack):\n cursymbol = stack[0]\n counter = 0 if stack[-1] == '+' else 1\n for symbol in stack:\n if symbol != cursymbol:\n cursymbol = symbol\n counter += 1\n output(i, counter)\n\n\n<mask token>\nfor i, line in enumerate(lines):\n if i > 0:\n solve(i, line)\n",
"step-3": "<mask token>\n\n\ndef output(i, out):\n with open('B-large.out', 'a') as outfile:\n outfile.write('Case #{0}: {1}\\n'.format(i, out))\n\n\ndef solve(i, stack):\n cursymbol = stack[0]\n counter = 0 if stack[-1] == '+' else 1\n for symbol in stack:\n if symbol != cursymbol:\n cursymbol = symbol\n counter += 1\n output(i, counter)\n\n\nlines = np.loadtxt('B-large.in', dtype=str)\nfor i, line in enumerate(lines):\n if i > 0:\n solve(i, line)\n",
"step-4": "import numpy as np\n\n\ndef output(i, out):\n with open('B-large.out', 'a') as outfile:\n outfile.write('Case #{0}: {1}\\n'.format(i, out))\n\n\ndef solve(i, stack):\n cursymbol = stack[0]\n counter = 0 if stack[-1] == '+' else 1\n for symbol in stack:\n if symbol != cursymbol:\n cursymbol = symbol\n counter += 1\n output(i, counter)\n\n\nlines = np.loadtxt('B-large.in', dtype=str)\nfor i, line in enumerate(lines):\n if i > 0:\n solve(i, line)\n",
"step-5": "import numpy as np\r\n\r\ndef output(i, out):\r\n with open('B-large.out', 'a') as outfile:\r\n outfile.write(\"Case #{0}: {1}\\n\".format(i, out))\r\n\r\ndef solve(i, stack): \r\n cursymbol = stack[0]\r\n counter = 0 if stack[-1] == \"+\" else 1\r\n for symbol in stack:\r\n if symbol != cursymbol:\r\n cursymbol = symbol\r\n counter += 1\r\n output(i, counter)\r\n\r\nlines = np.loadtxt('B-large.in', dtype=str)\r\n\r\nfor i, line in enumerate(lines):\r\n if i > 0:\r\n solve(i, line)",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class Ui_GitPage(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ui_GitPage(object):
def setupUi(self, GitPage):
GitPage.setObjectName('GitPage')
GitPage.resize(609, 751)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)
self.verticalLayout_2.setObjectName('verticalLayout_2')
self.headerLabel = QtWidgets.QLabel(GitPage)
self.headerLabel.setObjectName('headerLabel')
self.verticalLayout_2.addWidget(self.headerLabel)
self.line15 = QtWidgets.QFrame(GitPage)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setObjectName('line15')
self.verticalLayout_2.addWidget(self.line15)
self.groupBox = QtWidgets.QGroupBox(GitPage)
self.groupBox.setObjectName('groupBox')
self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout.setObjectName('gridLayout')
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setObjectName('label')
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.logSpinBox.setMaximum(999999)
self.logSpinBox.setObjectName('logSpinBox')
self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)
spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)
self.label_7 = QtWidgets.QLabel(self.groupBox)
self.label_7.setObjectName('label_7')
self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)
self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.logWidthSpinBox.setMinimum(10)
self.logWidthSpinBox.setObjectName('logWidthSpinBox')
self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox)
self.groupBox_2 = QtWidgets.QGroupBox(GitPage)
self.groupBox_2.setObjectName('groupBox_2')
self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)
self.gridLayout_2.setObjectName('gridLayout_2')
self.label_2 = QtWidgets.QLabel(self.groupBox_2)
self.label_2.setObjectName('label_2')
self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)
self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.commitSpinBox.setMinimum(1)
self.commitSpinBox.setMaximum(100)
self.commitSpinBox.setObjectName('commitSpinBox')
self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)
self.label_4 = QtWidgets.QLabel(self.groupBox_2)
self.label_4.setObjectName('label_4')
self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)
self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.commitIdSpinBox.setMinimum(1)
self.commitIdSpinBox.setMaximum(40)
self.commitIdSpinBox.setObjectName('commitIdSpinBox')
self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)
spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem2, 1, 2, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox_2)
self.groupBox_5 = QtWidgets.QGroupBox(GitPage)
self.groupBox_5.setObjectName('groupBox_5')
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)
self.horizontalLayout.setObjectName('horizontalLayout')
self.label_3 = QtWidgets.QLabel(self.groupBox_5)
self.label_3.setObjectName('label_3')
self.horizontalLayout.addWidget(self.label_3)
self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)
self.cleanupPatternEdit.setObjectName('cleanupPatternEdit')
self.horizontalLayout.addWidget(self.cleanupPatternEdit)
self.verticalLayout_2.addWidget(self.groupBox_5)
self.groupBox_3 = QtWidgets.QGroupBox(GitPage)
self.groupBox_3.setObjectName('groupBox_3')
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)
self.verticalLayout.setObjectName('verticalLayout')
self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)
self.aggressiveCheckBox.setObjectName('aggressiveCheckBox')
self.verticalLayout.addWidget(self.aggressiveCheckBox)
self.verticalLayout_2.addWidget(self.groupBox_3)
self.configButton = QtWidgets.QPushButton(GitPage)
self.configButton.setObjectName('configButton')
self.verticalLayout_2.addWidget(self.configButton)
spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.
Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem3)
self.retranslateUi(GitPage)
QtCore.QMetaObject.connectSlotsByName(GitPage)
GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)
GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)
GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)
GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)
GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)
GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Ui_GitPage(object):
def setupUi(self, GitPage):
GitPage.setObjectName('GitPage')
GitPage.resize(609, 751)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)
self.verticalLayout_2.setObjectName('verticalLayout_2')
self.headerLabel = QtWidgets.QLabel(GitPage)
self.headerLabel.setObjectName('headerLabel')
self.verticalLayout_2.addWidget(self.headerLabel)
self.line15 = QtWidgets.QFrame(GitPage)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setObjectName('line15')
self.verticalLayout_2.addWidget(self.line15)
self.groupBox = QtWidgets.QGroupBox(GitPage)
self.groupBox.setObjectName('groupBox')
self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout.setObjectName('gridLayout')
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setObjectName('label')
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.logSpinBox.setMaximum(999999)
self.logSpinBox.setObjectName('logSpinBox')
self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)
spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)
self.label_7 = QtWidgets.QLabel(self.groupBox)
self.label_7.setObjectName('label_7')
self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)
self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.logWidthSpinBox.setMinimum(10)
self.logWidthSpinBox.setObjectName('logWidthSpinBox')
self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox)
self.groupBox_2 = QtWidgets.QGroupBox(GitPage)
self.groupBox_2.setObjectName('groupBox_2')
self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)
self.gridLayout_2.setObjectName('gridLayout_2')
self.label_2 = QtWidgets.QLabel(self.groupBox_2)
self.label_2.setObjectName('label_2')
self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)
self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.commitSpinBox.setMinimum(1)
self.commitSpinBox.setMaximum(100)
self.commitSpinBox.setObjectName('commitSpinBox')
self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)
self.label_4 = QtWidgets.QLabel(self.groupBox_2)
self.label_4.setObjectName('label_4')
self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)
self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.commitIdSpinBox.setMinimum(1)
self.commitIdSpinBox.setMaximum(40)
self.commitIdSpinBox.setObjectName('commitIdSpinBox')
self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)
spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem2, 1, 2, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox_2)
self.groupBox_5 = QtWidgets.QGroupBox(GitPage)
self.groupBox_5.setObjectName('groupBox_5')
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)
self.horizontalLayout.setObjectName('horizontalLayout')
self.label_3 = QtWidgets.QLabel(self.groupBox_5)
self.label_3.setObjectName('label_3')
self.horizontalLayout.addWidget(self.label_3)
self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)
self.cleanupPatternEdit.setObjectName('cleanupPatternEdit')
self.horizontalLayout.addWidget(self.cleanupPatternEdit)
self.verticalLayout_2.addWidget(self.groupBox_5)
self.groupBox_3 = QtWidgets.QGroupBox(GitPage)
self.groupBox_3.setObjectName('groupBox_3')
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)
self.verticalLayout.setObjectName('verticalLayout')
self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)
self.aggressiveCheckBox.setObjectName('aggressiveCheckBox')
self.verticalLayout.addWidget(self.aggressiveCheckBox)
self.verticalLayout_2.addWidget(self.groupBox_3)
self.configButton = QtWidgets.QPushButton(GitPage)
self.configButton.setObjectName('configButton')
self.verticalLayout_2.addWidget(self.configButton)
spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.
Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem3)
self.retranslateUi(GitPage)
QtCore.QMetaObject.connectSlotsByName(GitPage)
GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)
GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)
GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)
GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)
GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)
GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)
def retranslateUi(self, GitPage):
_translate = QtCore.QCoreApplication.translate
self.headerLabel.setText(_translate('GitPage',
'<b>Configure Git Interface</b>'))
self.groupBox.setTitle(_translate('GitPage', 'Log'))
self.label.setText(_translate('GitPage', 'No. of log messages shown:'))
self.logSpinBox.setToolTip(_translate('GitPage',
'Enter the number of log messages to be shown'))
self.label_7.setText(_translate('GitPage',
'No. of subject characters shown in list:'))
self.logWidthSpinBox.setToolTip(_translate('GitPage',
'Enter the number of characters of the commit subject to be shown in the list'
))
self.groupBox_2.setTitle(_translate('GitPage', 'Commit'))
self.label_2.setText(_translate('GitPage',
'No. of commit messages to remember:'))
self.commitSpinBox.setToolTip(_translate('GitPage',
'Enter the number of commit messages to remember'))
self.label_4.setText(_translate('GitPage', 'Commit ID length:'))
self.commitIdSpinBox.setToolTip(_translate('GitPage',
'Enter the number of character to show for the commit ID'))
self.groupBox_5.setTitle(_translate('GitPage', 'Cleanup'))
self.label_3.setText(_translate('GitPage', 'Pattern:'))
self.cleanupPatternEdit.setToolTip(_translate('GitPage',
'Enter the file name patterns to be used for cleaning up (entries separated by a space character)'
))
self.groupBox_3.setTitle(_translate('GitPage',
'Repository Optimization'))
self.aggressiveCheckBox.setToolTip(_translate('GitPage',
"Select this to use the '--aggressive' option for garbage collection"
))
self.aggressiveCheckBox.setText(_translate('GitPage',
'Perform aggressive repository optimization'))
self.configButton.setToolTip(_translate('GitPage',
'Edit the Git configuration file'))
self.configButton.setText(_translate('GitPage',
'Edit configuration file'))
<|reserved_special_token_1|>
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_GitPage(object):
def setupUi(self, GitPage):
GitPage.setObjectName('GitPage')
GitPage.resize(609, 751)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)
self.verticalLayout_2.setObjectName('verticalLayout_2')
self.headerLabel = QtWidgets.QLabel(GitPage)
self.headerLabel.setObjectName('headerLabel')
self.verticalLayout_2.addWidget(self.headerLabel)
self.line15 = QtWidgets.QFrame(GitPage)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setObjectName('line15')
self.verticalLayout_2.addWidget(self.line15)
self.groupBox = QtWidgets.QGroupBox(GitPage)
self.groupBox.setObjectName('groupBox')
self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout.setObjectName('gridLayout')
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setObjectName('label')
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.logSpinBox.setMaximum(999999)
self.logSpinBox.setObjectName('logSpinBox')
self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)
spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)
self.label_7 = QtWidgets.QLabel(self.groupBox)
self.label_7.setObjectName('label_7')
self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)
self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.logWidthSpinBox.setMinimum(10)
self.logWidthSpinBox.setObjectName('logWidthSpinBox')
self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox)
self.groupBox_2 = QtWidgets.QGroupBox(GitPage)
self.groupBox_2.setObjectName('groupBox_2')
self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)
self.gridLayout_2.setObjectName('gridLayout_2')
self.label_2 = QtWidgets.QLabel(self.groupBox_2)
self.label_2.setObjectName('label_2')
self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)
self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.commitSpinBox.setMinimum(1)
self.commitSpinBox.setMaximum(100)
self.commitSpinBox.setObjectName('commitSpinBox')
self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)
self.label_4 = QtWidgets.QLabel(self.groupBox_2)
self.label_4.setObjectName('label_4')
self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)
self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.
AlignTrailing | QtCore.Qt.AlignVCenter)
self.commitIdSpinBox.setMinimum(1)
self.commitIdSpinBox.setMaximum(40)
self.commitIdSpinBox.setObjectName('commitIdSpinBox')
self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)
spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.
Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem2, 1, 2, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox_2)
self.groupBox_5 = QtWidgets.QGroupBox(GitPage)
self.groupBox_5.setObjectName('groupBox_5')
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)
self.horizontalLayout.setObjectName('horizontalLayout')
self.label_3 = QtWidgets.QLabel(self.groupBox_5)
self.label_3.setObjectName('label_3')
self.horizontalLayout.addWidget(self.label_3)
self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)
self.cleanupPatternEdit.setObjectName('cleanupPatternEdit')
self.horizontalLayout.addWidget(self.cleanupPatternEdit)
self.verticalLayout_2.addWidget(self.groupBox_5)
self.groupBox_3 = QtWidgets.QGroupBox(GitPage)
self.groupBox_3.setObjectName('groupBox_3')
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)
self.verticalLayout.setObjectName('verticalLayout')
self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)
self.aggressiveCheckBox.setObjectName('aggressiveCheckBox')
self.verticalLayout.addWidget(self.aggressiveCheckBox)
self.verticalLayout_2.addWidget(self.groupBox_3)
self.configButton = QtWidgets.QPushButton(GitPage)
self.configButton.setObjectName('configButton')
self.verticalLayout_2.addWidget(self.configButton)
spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.
Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem3)
self.retranslateUi(GitPage)
QtCore.QMetaObject.connectSlotsByName(GitPage)
GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)
GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)
GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)
GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)
GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)
GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)
def retranslateUi(self, GitPage):
_translate = QtCore.QCoreApplication.translate
self.headerLabel.setText(_translate('GitPage',
'<b>Configure Git Interface</b>'))
self.groupBox.setTitle(_translate('GitPage', 'Log'))
self.label.setText(_translate('GitPage', 'No. of log messages shown:'))
self.logSpinBox.setToolTip(_translate('GitPage',
'Enter the number of log messages to be shown'))
self.label_7.setText(_translate('GitPage',
'No. of subject characters shown in list:'))
self.logWidthSpinBox.setToolTip(_translate('GitPage',
'Enter the number of characters of the commit subject to be shown in the list'
))
self.groupBox_2.setTitle(_translate('GitPage', 'Commit'))
self.label_2.setText(_translate('GitPage',
'No. of commit messages to remember:'))
self.commitSpinBox.setToolTip(_translate('GitPage',
'Enter the number of commit messages to remember'))
self.label_4.setText(_translate('GitPage', 'Commit ID length:'))
self.commitIdSpinBox.setToolTip(_translate('GitPage',
'Enter the number of character to show for the commit ID'))
self.groupBox_5.setTitle(_translate('GitPage', 'Cleanup'))
self.label_3.setText(_translate('GitPage', 'Pattern:'))
self.cleanupPatternEdit.setToolTip(_translate('GitPage',
'Enter the file name patterns to be used for cleaning up (entries separated by a space character)'
))
self.groupBox_3.setTitle(_translate('GitPage',
'Repository Optimization'))
self.aggressiveCheckBox.setToolTip(_translate('GitPage',
"Select this to use the '--aggressive' option for garbage collection"
))
self.aggressiveCheckBox.setText(_translate('GitPage',
'Perform aggressive repository optimization'))
self.configButton.setToolTip(_translate('GitPage',
'Edit the Git configuration file'))
self.configButton.setText(_translate('GitPage',
'Edit configuration file'))
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/cypher/.eric6/eric6plugins/vcsGit/ConfigurationPage/GitPage.ui'
#
# Created by: PyQt5 UI code generator 5.8
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_GitPage(object):
def setupUi(self, GitPage):
GitPage.setObjectName("GitPage")
GitPage.resize(609, 751)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.headerLabel = QtWidgets.QLabel(GitPage)
self.headerLabel.setObjectName("headerLabel")
self.verticalLayout_2.addWidget(self.headerLabel)
self.line15 = QtWidgets.QFrame(GitPage)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line15.setFrameShape(QtWidgets.QFrame.HLine)
self.line15.setObjectName("line15")
self.verticalLayout_2.addWidget(self.line15)
self.groupBox = QtWidgets.QGroupBox(GitPage)
self.groupBox.setObjectName("groupBox")
self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout.setObjectName("gridLayout")
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.logSpinBox.setMaximum(999999)
self.logSpinBox.setObjectName("logSpinBox")
self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)
spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)
self.label_7 = QtWidgets.QLabel(self.groupBox)
self.label_7.setObjectName("label_7")
self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)
self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)
self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.logWidthSpinBox.setMinimum(10)
self.logWidthSpinBox.setObjectName("logWidthSpinBox")
self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox)
self.groupBox_2 = QtWidgets.QGroupBox(GitPage)
self.groupBox_2.setObjectName("groupBox_2")
self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)
self.gridLayout_2.setObjectName("gridLayout_2")
self.label_2 = QtWidgets.QLabel(self.groupBox_2)
self.label_2.setObjectName("label_2")
self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)
self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.commitSpinBox.setMinimum(1)
self.commitSpinBox.setMaximum(100)
self.commitSpinBox.setObjectName("commitSpinBox")
self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)
self.label_4 = QtWidgets.QLabel(self.groupBox_2)
self.label_4.setObjectName("label_4")
self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)
self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)
self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.commitIdSpinBox.setMinimum(1)
self.commitIdSpinBox.setMaximum(40)
self.commitIdSpinBox.setObjectName("commitIdSpinBox")
self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)
spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem2, 1, 2, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox_2)
self.groupBox_5 = QtWidgets.QGroupBox(GitPage)
self.groupBox_5.setObjectName("groupBox_5")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)
self.horizontalLayout.setObjectName("horizontalLayout")
self.label_3 = QtWidgets.QLabel(self.groupBox_5)
self.label_3.setObjectName("label_3")
self.horizontalLayout.addWidget(self.label_3)
self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)
self.cleanupPatternEdit.setObjectName("cleanupPatternEdit")
self.horizontalLayout.addWidget(self.cleanupPatternEdit)
self.verticalLayout_2.addWidget(self.groupBox_5)
self.groupBox_3 = QtWidgets.QGroupBox(GitPage)
self.groupBox_3.setObjectName("groupBox_3")
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)
self.verticalLayout.setObjectName("verticalLayout")
self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)
self.aggressiveCheckBox.setObjectName("aggressiveCheckBox")
self.verticalLayout.addWidget(self.aggressiveCheckBox)
self.verticalLayout_2.addWidget(self.groupBox_3)
self.configButton = QtWidgets.QPushButton(GitPage)
self.configButton.setObjectName("configButton")
self.verticalLayout_2.addWidget(self.configButton)
spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem3)
self.retranslateUi(GitPage)
QtCore.QMetaObject.connectSlotsByName(GitPage)
GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)
GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)
GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)
GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)
GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)
GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)
def retranslateUi(self, GitPage):
_translate = QtCore.QCoreApplication.translate
self.headerLabel.setText(_translate("GitPage", "<b>Configure Git Interface</b>"))
self.groupBox.setTitle(_translate("GitPage", "Log"))
self.label.setText(_translate("GitPage", "No. of log messages shown:"))
self.logSpinBox.setToolTip(_translate("GitPage", "Enter the number of log messages to be shown"))
self.label_7.setText(_translate("GitPage", "No. of subject characters shown in list:"))
self.logWidthSpinBox.setToolTip(_translate("GitPage", "Enter the number of characters of the commit subject to be shown in the list"))
self.groupBox_2.setTitle(_translate("GitPage", "Commit"))
self.label_2.setText(_translate("GitPage", "No. of commit messages to remember:"))
self.commitSpinBox.setToolTip(_translate("GitPage", "Enter the number of commit messages to remember"))
self.label_4.setText(_translate("GitPage", "Commit ID length:"))
self.commitIdSpinBox.setToolTip(_translate("GitPage", "Enter the number of character to show for the commit ID"))
self.groupBox_5.setTitle(_translate("GitPage", "Cleanup"))
self.label_3.setText(_translate("GitPage", "Pattern:"))
self.cleanupPatternEdit.setToolTip(_translate("GitPage", "Enter the file name patterns to be used for cleaning up (entries separated by a space character)"))
self.groupBox_3.setTitle(_translate("GitPage", "Repository Optimization"))
self.aggressiveCheckBox.setToolTip(_translate("GitPage", "Select this to use the \'--aggressive\' option for garbage collection"))
self.aggressiveCheckBox.setText(_translate("GitPage", "Perform aggressive repository optimization"))
self.configButton.setToolTip(_translate("GitPage", "Edit the Git configuration file"))
self.configButton.setText(_translate("GitPage", "Edit configuration file"))
|
flexible
|
{
"blob_id": "80891a4c9703f91509d2c1b22304f33426dfb962",
"index": 4419,
"step-1": "<mask token>\n\n\nclass Ui_GitPage(object):\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass Ui_GitPage(object):\n\n def setupUi(self, GitPage):\n GitPage.setObjectName('GitPage')\n GitPage.resize(609, 751)\n self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)\n self.verticalLayout_2.setObjectName('verticalLayout_2')\n self.headerLabel = QtWidgets.QLabel(GitPage)\n self.headerLabel.setObjectName('headerLabel')\n self.verticalLayout_2.addWidget(self.headerLabel)\n self.line15 = QtWidgets.QFrame(GitPage)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setObjectName('line15')\n self.verticalLayout_2.addWidget(self.line15)\n self.groupBox = QtWidgets.QGroupBox(GitPage)\n self.groupBox.setObjectName('groupBox')\n self.gridLayout = QtWidgets.QGridLayout(self.groupBox)\n self.gridLayout.setObjectName('gridLayout')\n self.label = QtWidgets.QLabel(self.groupBox)\n self.label.setObjectName('label')\n self.gridLayout.addWidget(self.label, 0, 0, 1, 1)\n self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.logSpinBox.setMaximum(999999)\n self.logSpinBox.setObjectName('logSpinBox')\n self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)\n spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)\n self.label_7 = QtWidgets.QLabel(self.groupBox)\n self.label_7.setObjectName('label_7')\n self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)\n self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.logWidthSpinBox.setMinimum(10)\n self.logWidthSpinBox.setObjectName('logWidthSpinBox')\n self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)\n self.verticalLayout_2.addWidget(self.groupBox)\n self.groupBox_2 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_2.setObjectName('groupBox_2')\n self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)\n self.gridLayout_2.setObjectName('gridLayout_2')\n self.label_2 = QtWidgets.QLabel(self.groupBox_2)\n self.label_2.setObjectName('label_2')\n self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)\n self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.commitSpinBox.setMinimum(1)\n self.commitSpinBox.setMaximum(100)\n self.commitSpinBox.setObjectName('commitSpinBox')\n self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)\n spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)\n self.label_4 = QtWidgets.QLabel(self.groupBox_2)\n self.label_4.setObjectName('label_4')\n self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)\n self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.commitIdSpinBox.setMinimum(1)\n self.commitIdSpinBox.setMaximum(40)\n self.commitIdSpinBox.setObjectName('commitIdSpinBox')\n self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)\n spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout_2.addItem(spacerItem2, 1, 2, 
1, 1)\n self.verticalLayout_2.addWidget(self.groupBox_2)\n self.groupBox_5 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_5.setObjectName('groupBox_5')\n self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)\n self.horizontalLayout.setObjectName('horizontalLayout')\n self.label_3 = QtWidgets.QLabel(self.groupBox_5)\n self.label_3.setObjectName('label_3')\n self.horizontalLayout.addWidget(self.label_3)\n self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)\n self.cleanupPatternEdit.setObjectName('cleanupPatternEdit')\n self.horizontalLayout.addWidget(self.cleanupPatternEdit)\n self.verticalLayout_2.addWidget(self.groupBox_5)\n self.groupBox_3 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_3.setObjectName('groupBox_3')\n self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)\n self.verticalLayout.setObjectName('verticalLayout')\n self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)\n self.aggressiveCheckBox.setObjectName('aggressiveCheckBox')\n self.verticalLayout.addWidget(self.aggressiveCheckBox)\n self.verticalLayout_2.addWidget(self.groupBox_3)\n self.configButton = QtWidgets.QPushButton(GitPage)\n self.configButton.setObjectName('configButton')\n self.verticalLayout_2.addWidget(self.configButton)\n spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.\n Minimum, QtWidgets.QSizePolicy.Expanding)\n self.verticalLayout_2.addItem(spacerItem3)\n self.retranslateUi(GitPage)\n QtCore.QMetaObject.connectSlotsByName(GitPage)\n GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)\n GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)\n GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)\n GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)\n GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)\n GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Ui_GitPage(object):\n\n def setupUi(self, GitPage):\n GitPage.setObjectName('GitPage')\n GitPage.resize(609, 751)\n self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)\n self.verticalLayout_2.setObjectName('verticalLayout_2')\n self.headerLabel = QtWidgets.QLabel(GitPage)\n self.headerLabel.setObjectName('headerLabel')\n self.verticalLayout_2.addWidget(self.headerLabel)\n self.line15 = QtWidgets.QFrame(GitPage)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setObjectName('line15')\n self.verticalLayout_2.addWidget(self.line15)\n self.groupBox = QtWidgets.QGroupBox(GitPage)\n self.groupBox.setObjectName('groupBox')\n self.gridLayout = QtWidgets.QGridLayout(self.groupBox)\n self.gridLayout.setObjectName('gridLayout')\n self.label = QtWidgets.QLabel(self.groupBox)\n self.label.setObjectName('label')\n self.gridLayout.addWidget(self.label, 0, 0, 1, 1)\n self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.logSpinBox.setMaximum(999999)\n self.logSpinBox.setObjectName('logSpinBox')\n self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)\n spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)\n self.label_7 = QtWidgets.QLabel(self.groupBox)\n self.label_7.setObjectName('label_7')\n self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)\n self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.logWidthSpinBox.setMinimum(10)\n self.logWidthSpinBox.setObjectName('logWidthSpinBox')\n self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)\n self.verticalLayout_2.addWidget(self.groupBox)\n self.groupBox_2 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_2.setObjectName('groupBox_2')\n self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)\n self.gridLayout_2.setObjectName('gridLayout_2')\n self.label_2 = QtWidgets.QLabel(self.groupBox_2)\n self.label_2.setObjectName('label_2')\n self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)\n self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.commitSpinBox.setMinimum(1)\n self.commitSpinBox.setMaximum(100)\n self.commitSpinBox.setObjectName('commitSpinBox')\n self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)\n spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)\n self.label_4 = QtWidgets.QLabel(self.groupBox_2)\n self.label_4.setObjectName('label_4')\n self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)\n self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.commitIdSpinBox.setMinimum(1)\n self.commitIdSpinBox.setMaximum(40)\n self.commitIdSpinBox.setObjectName('commitIdSpinBox')\n self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)\n spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout_2.addItem(spacerItem2, 1, 2, 
1, 1)\n self.verticalLayout_2.addWidget(self.groupBox_2)\n self.groupBox_5 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_5.setObjectName('groupBox_5')\n self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)\n self.horizontalLayout.setObjectName('horizontalLayout')\n self.label_3 = QtWidgets.QLabel(self.groupBox_5)\n self.label_3.setObjectName('label_3')\n self.horizontalLayout.addWidget(self.label_3)\n self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)\n self.cleanupPatternEdit.setObjectName('cleanupPatternEdit')\n self.horizontalLayout.addWidget(self.cleanupPatternEdit)\n self.verticalLayout_2.addWidget(self.groupBox_5)\n self.groupBox_3 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_3.setObjectName('groupBox_3')\n self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)\n self.verticalLayout.setObjectName('verticalLayout')\n self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)\n self.aggressiveCheckBox.setObjectName('aggressiveCheckBox')\n self.verticalLayout.addWidget(self.aggressiveCheckBox)\n self.verticalLayout_2.addWidget(self.groupBox_3)\n self.configButton = QtWidgets.QPushButton(GitPage)\n self.configButton.setObjectName('configButton')\n self.verticalLayout_2.addWidget(self.configButton)\n spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.\n Minimum, QtWidgets.QSizePolicy.Expanding)\n self.verticalLayout_2.addItem(spacerItem3)\n self.retranslateUi(GitPage)\n QtCore.QMetaObject.connectSlotsByName(GitPage)\n GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)\n GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)\n GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)\n GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)\n GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)\n GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)\n\n def retranslateUi(self, GitPage):\n _translate = QtCore.QCoreApplication.translate\n self.headerLabel.setText(_translate('GitPage',\n '<b>Configure Git Interface</b>'))\n self.groupBox.setTitle(_translate('GitPage', 'Log'))\n self.label.setText(_translate('GitPage', 'No. of log messages shown:'))\n self.logSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of log messages to be shown'))\n self.label_7.setText(_translate('GitPage',\n 'No. of subject characters shown in list:'))\n self.logWidthSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of characters of the commit subject to be shown in the list'\n ))\n self.groupBox_2.setTitle(_translate('GitPage', 'Commit'))\n self.label_2.setText(_translate('GitPage',\n 'No. 
of commit messages to remember:'))\n self.commitSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of commit messages to remember'))\n self.label_4.setText(_translate('GitPage', 'Commit ID length:'))\n self.commitIdSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of character to show for the commit ID'))\n self.groupBox_5.setTitle(_translate('GitPage', 'Cleanup'))\n self.label_3.setText(_translate('GitPage', 'Pattern:'))\n self.cleanupPatternEdit.setToolTip(_translate('GitPage',\n 'Enter the file name patterns to be used for cleaning up (entries separated by a space character)'\n ))\n self.groupBox_3.setTitle(_translate('GitPage',\n 'Repository Optimization'))\n self.aggressiveCheckBox.setToolTip(_translate('GitPage',\n \"Select this to use the '--aggressive' option for garbage collection\"\n ))\n self.aggressiveCheckBox.setText(_translate('GitPage',\n 'Perform aggressive repository optimization'))\n self.configButton.setToolTip(_translate('GitPage',\n 'Edit the Git configuration file'))\n self.configButton.setText(_translate('GitPage',\n 'Edit configuration file'))\n",
"step-4": "from PyQt5 import QtCore, QtGui, QtWidgets\n\n\nclass Ui_GitPage(object):\n\n def setupUi(self, GitPage):\n GitPage.setObjectName('GitPage')\n GitPage.resize(609, 751)\n self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)\n self.verticalLayout_2.setObjectName('verticalLayout_2')\n self.headerLabel = QtWidgets.QLabel(GitPage)\n self.headerLabel.setObjectName('headerLabel')\n self.verticalLayout_2.addWidget(self.headerLabel)\n self.line15 = QtWidgets.QFrame(GitPage)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setObjectName('line15')\n self.verticalLayout_2.addWidget(self.line15)\n self.groupBox = QtWidgets.QGroupBox(GitPage)\n self.groupBox.setObjectName('groupBox')\n self.gridLayout = QtWidgets.QGridLayout(self.groupBox)\n self.gridLayout.setObjectName('gridLayout')\n self.label = QtWidgets.QLabel(self.groupBox)\n self.label.setObjectName('label')\n self.gridLayout.addWidget(self.label, 0, 0, 1, 1)\n self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.logSpinBox.setMaximum(999999)\n self.logSpinBox.setObjectName('logSpinBox')\n self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)\n spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)\n self.label_7 = QtWidgets.QLabel(self.groupBox)\n self.label_7.setObjectName('label_7')\n self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)\n self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.logWidthSpinBox.setMinimum(10)\n self.logWidthSpinBox.setObjectName('logWidthSpinBox')\n self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)\n self.verticalLayout_2.addWidget(self.groupBox)\n self.groupBox_2 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_2.setObjectName('groupBox_2')\n self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)\n self.gridLayout_2.setObjectName('gridLayout_2')\n self.label_2 = QtWidgets.QLabel(self.groupBox_2)\n self.label_2.setObjectName('label_2')\n self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)\n self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.commitSpinBox.setMinimum(1)\n self.commitSpinBox.setMaximum(100)\n self.commitSpinBox.setObjectName('commitSpinBox')\n self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)\n spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)\n self.label_4 = QtWidgets.QLabel(self.groupBox_2)\n self.label_4.setObjectName('label_4')\n self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)\n self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.\n AlignTrailing | QtCore.Qt.AlignVCenter)\n self.commitIdSpinBox.setMinimum(1)\n self.commitIdSpinBox.setMaximum(40)\n self.commitIdSpinBox.setObjectName('commitIdSpinBox')\n self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)\n spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.\n Expanding, QtWidgets.QSizePolicy.Minimum)\n 
self.gridLayout_2.addItem(spacerItem2, 1, 2, 1, 1)\n self.verticalLayout_2.addWidget(self.groupBox_2)\n self.groupBox_5 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_5.setObjectName('groupBox_5')\n self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)\n self.horizontalLayout.setObjectName('horizontalLayout')\n self.label_3 = QtWidgets.QLabel(self.groupBox_5)\n self.label_3.setObjectName('label_3')\n self.horizontalLayout.addWidget(self.label_3)\n self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)\n self.cleanupPatternEdit.setObjectName('cleanupPatternEdit')\n self.horizontalLayout.addWidget(self.cleanupPatternEdit)\n self.verticalLayout_2.addWidget(self.groupBox_5)\n self.groupBox_3 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_3.setObjectName('groupBox_3')\n self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)\n self.verticalLayout.setObjectName('verticalLayout')\n self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)\n self.aggressiveCheckBox.setObjectName('aggressiveCheckBox')\n self.verticalLayout.addWidget(self.aggressiveCheckBox)\n self.verticalLayout_2.addWidget(self.groupBox_3)\n self.configButton = QtWidgets.QPushButton(GitPage)\n self.configButton.setObjectName('configButton')\n self.verticalLayout_2.addWidget(self.configButton)\n spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.\n Minimum, QtWidgets.QSizePolicy.Expanding)\n self.verticalLayout_2.addItem(spacerItem3)\n self.retranslateUi(GitPage)\n QtCore.QMetaObject.connectSlotsByName(GitPage)\n GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)\n GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)\n GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)\n GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)\n GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)\n GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)\n\n def retranslateUi(self, GitPage):\n _translate = QtCore.QCoreApplication.translate\n self.headerLabel.setText(_translate('GitPage',\n '<b>Configure Git Interface</b>'))\n self.groupBox.setTitle(_translate('GitPage', 'Log'))\n self.label.setText(_translate('GitPage', 'No. of log messages shown:'))\n self.logSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of log messages to be shown'))\n self.label_7.setText(_translate('GitPage',\n 'No. of subject characters shown in list:'))\n self.logWidthSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of characters of the commit subject to be shown in the list'\n ))\n self.groupBox_2.setTitle(_translate('GitPage', 'Commit'))\n self.label_2.setText(_translate('GitPage',\n 'No. 
of commit messages to remember:'))\n self.commitSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of commit messages to remember'))\n self.label_4.setText(_translate('GitPage', 'Commit ID length:'))\n self.commitIdSpinBox.setToolTip(_translate('GitPage',\n 'Enter the number of character to show for the commit ID'))\n self.groupBox_5.setTitle(_translate('GitPage', 'Cleanup'))\n self.label_3.setText(_translate('GitPage', 'Pattern:'))\n self.cleanupPatternEdit.setToolTip(_translate('GitPage',\n 'Enter the file name patterns to be used for cleaning up (entries separated by a space character)'\n ))\n self.groupBox_3.setTitle(_translate('GitPage',\n 'Repository Optimization'))\n self.aggressiveCheckBox.setToolTip(_translate('GitPage',\n \"Select this to use the '--aggressive' option for garbage collection\"\n ))\n self.aggressiveCheckBox.setText(_translate('GitPage',\n 'Perform aggressive repository optimization'))\n self.configButton.setToolTip(_translate('GitPage',\n 'Edit the Git configuration file'))\n self.configButton.setText(_translate('GitPage',\n 'Edit configuration file'))\n",
"step-5": "# -*- coding: utf-8 -*-\n\n# Form implementation generated from reading ui file '/home/cypher/.eric6/eric6plugins/vcsGit/ConfigurationPage/GitPage.ui'\n#\n# Created by: PyQt5 UI code generator 5.8\n#\n# WARNING! All changes made in this file will be lost!\n\nfrom PyQt5 import QtCore, QtGui, QtWidgets\n\nclass Ui_GitPage(object):\n def setupUi(self, GitPage):\n GitPage.setObjectName(\"GitPage\")\n GitPage.resize(609, 751)\n self.verticalLayout_2 = QtWidgets.QVBoxLayout(GitPage)\n self.verticalLayout_2.setObjectName(\"verticalLayout_2\")\n self.headerLabel = QtWidgets.QLabel(GitPage)\n self.headerLabel.setObjectName(\"headerLabel\")\n self.verticalLayout_2.addWidget(self.headerLabel)\n self.line15 = QtWidgets.QFrame(GitPage)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.line15.setFrameShape(QtWidgets.QFrame.HLine)\n self.line15.setObjectName(\"line15\")\n self.verticalLayout_2.addWidget(self.line15)\n self.groupBox = QtWidgets.QGroupBox(GitPage)\n self.groupBox.setObjectName(\"groupBox\")\n self.gridLayout = QtWidgets.QGridLayout(self.groupBox)\n self.gridLayout.setObjectName(\"gridLayout\")\n self.label = QtWidgets.QLabel(self.groupBox)\n self.label.setObjectName(\"label\")\n self.gridLayout.addWidget(self.label, 0, 0, 1, 1)\n self.logSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)\n self.logSpinBox.setMaximum(999999)\n self.logSpinBox.setObjectName(\"logSpinBox\")\n self.gridLayout.addWidget(self.logSpinBox, 0, 1, 1, 1)\n spacerItem = QtWidgets.QSpacerItem(41, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout.addItem(spacerItem, 0, 2, 1, 1)\n self.label_7 = QtWidgets.QLabel(self.groupBox)\n self.label_7.setObjectName(\"label_7\")\n self.gridLayout.addWidget(self.label_7, 1, 0, 1, 1)\n self.logWidthSpinBox = QtWidgets.QSpinBox(self.groupBox)\n self.logWidthSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)\n self.logWidthSpinBox.setMinimum(10)\n self.logWidthSpinBox.setObjectName(\"logWidthSpinBox\")\n self.gridLayout.addWidget(self.logWidthSpinBox, 1, 1, 1, 1)\n self.verticalLayout_2.addWidget(self.groupBox)\n self.groupBox_2 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_2.setObjectName(\"groupBox_2\")\n self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_2)\n self.gridLayout_2.setObjectName(\"gridLayout_2\")\n self.label_2 = QtWidgets.QLabel(self.groupBox_2)\n self.label_2.setObjectName(\"label_2\")\n self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)\n self.commitSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)\n self.commitSpinBox.setMinimum(1)\n self.commitSpinBox.setMaximum(100)\n self.commitSpinBox.setObjectName(\"commitSpinBox\")\n self.gridLayout_2.addWidget(self.commitSpinBox, 0, 1, 1, 1)\n spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout_2.addItem(spacerItem1, 0, 2, 1, 1)\n self.label_4 = QtWidgets.QLabel(self.groupBox_2)\n self.label_4.setObjectName(\"label_4\")\n self.gridLayout_2.addWidget(self.label_4, 1, 0, 1, 1)\n self.commitIdSpinBox = QtWidgets.QSpinBox(self.groupBox_2)\n self.commitIdSpinBox.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)\n self.commitIdSpinBox.setMinimum(1)\n 
self.commitIdSpinBox.setMaximum(40)\n self.commitIdSpinBox.setObjectName(\"commitIdSpinBox\")\n self.gridLayout_2.addWidget(self.commitIdSpinBox, 1, 1, 1, 1)\n spacerItem2 = QtWidgets.QSpacerItem(269, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)\n self.gridLayout_2.addItem(spacerItem2, 1, 2, 1, 1)\n self.verticalLayout_2.addWidget(self.groupBox_2)\n self.groupBox_5 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_5.setObjectName(\"groupBox_5\")\n self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_5)\n self.horizontalLayout.setObjectName(\"horizontalLayout\")\n self.label_3 = QtWidgets.QLabel(self.groupBox_5)\n self.label_3.setObjectName(\"label_3\")\n self.horizontalLayout.addWidget(self.label_3)\n self.cleanupPatternEdit = QtWidgets.QLineEdit(self.groupBox_5)\n self.cleanupPatternEdit.setObjectName(\"cleanupPatternEdit\")\n self.horizontalLayout.addWidget(self.cleanupPatternEdit)\n self.verticalLayout_2.addWidget(self.groupBox_5)\n self.groupBox_3 = QtWidgets.QGroupBox(GitPage)\n self.groupBox_3.setObjectName(\"groupBox_3\")\n self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_3)\n self.verticalLayout.setObjectName(\"verticalLayout\")\n self.aggressiveCheckBox = QtWidgets.QCheckBox(self.groupBox_3)\n self.aggressiveCheckBox.setObjectName(\"aggressiveCheckBox\")\n self.verticalLayout.addWidget(self.aggressiveCheckBox)\n self.verticalLayout_2.addWidget(self.groupBox_3)\n self.configButton = QtWidgets.QPushButton(GitPage)\n self.configButton.setObjectName(\"configButton\")\n self.verticalLayout_2.addWidget(self.configButton)\n spacerItem3 = QtWidgets.QSpacerItem(388, 21, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)\n self.verticalLayout_2.addItem(spacerItem3)\n\n self.retranslateUi(GitPage)\n QtCore.QMetaObject.connectSlotsByName(GitPage)\n GitPage.setTabOrder(self.logSpinBox, self.logWidthSpinBox)\n GitPage.setTabOrder(self.logWidthSpinBox, self.commitSpinBox)\n GitPage.setTabOrder(self.commitSpinBox, self.commitIdSpinBox)\n GitPage.setTabOrder(self.commitIdSpinBox, self.cleanupPatternEdit)\n GitPage.setTabOrder(self.cleanupPatternEdit, self.aggressiveCheckBox)\n GitPage.setTabOrder(self.aggressiveCheckBox, self.configButton)\n\n def retranslateUi(self, GitPage):\n _translate = QtCore.QCoreApplication.translate\n self.headerLabel.setText(_translate(\"GitPage\", \"<b>Configure Git Interface</b>\"))\n self.groupBox.setTitle(_translate(\"GitPage\", \"Log\"))\n self.label.setText(_translate(\"GitPage\", \"No. of log messages shown:\"))\n self.logSpinBox.setToolTip(_translate(\"GitPage\", \"Enter the number of log messages to be shown\"))\n self.label_7.setText(_translate(\"GitPage\", \"No. of subject characters shown in list:\"))\n self.logWidthSpinBox.setToolTip(_translate(\"GitPage\", \"Enter the number of characters of the commit subject to be shown in the list\"))\n self.groupBox_2.setTitle(_translate(\"GitPage\", \"Commit\"))\n self.label_2.setText(_translate(\"GitPage\", \"No. 
of commit messages to remember:\"))\n self.commitSpinBox.setToolTip(_translate(\"GitPage\", \"Enter the number of commit messages to remember\"))\n self.label_4.setText(_translate(\"GitPage\", \"Commit ID length:\"))\n self.commitIdSpinBox.setToolTip(_translate(\"GitPage\", \"Enter the number of character to show for the commit ID\"))\n self.groupBox_5.setTitle(_translate(\"GitPage\", \"Cleanup\"))\n self.label_3.setText(_translate(\"GitPage\", \"Pattern:\"))\n self.cleanupPatternEdit.setToolTip(_translate(\"GitPage\", \"Enter the file name patterns to be used for cleaning up (entries separated by a space character)\"))\n self.groupBox_3.setTitle(_translate(\"GitPage\", \"Repository Optimization\"))\n self.aggressiveCheckBox.setToolTip(_translate(\"GitPage\", \"Select this to use the \\'--aggressive\\' option for garbage collection\"))\n self.aggressiveCheckBox.setText(_translate(\"GitPage\", \"Perform aggressive repository optimization\"))\n self.configButton.setToolTip(_translate(\"GitPage\", \"Edit the Git configuration file\"))\n self.configButton.setText(_translate(\"GitPage\", \"Edit configuration file\"))\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('mybus', '0007_auto_20160104_0053')]
operations = [migrations.RemoveField(model_name='businfo', name=
'description'), migrations.AlterField(model_name='businfo', name=
'title', field=models.CharField(max_length=255, verbose_name=
b'Bus Info')), migrations.AlterField(model_name='businfo', name=
'url', field=models.CharField(max_length=255, verbose_name=b'Bus No'))]
<|reserved_special_token_1|>
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('mybus', '0007_auto_20160104_0053')]
operations = [migrations.RemoveField(model_name='businfo', name=
'description'), migrations.AlterField(model_name='businfo', name=
'title', field=models.CharField(max_length=255, verbose_name=
b'Bus Info')), migrations.AlterField(model_name='businfo', name=
'url', field=models.CharField(max_length=255, verbose_name=b'Bus No'))]
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-03 19:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mybus', '0007_auto_20160104_0053'),
]
operations = [
migrations.RemoveField(
model_name='businfo',
name='description',
),
migrations.AlterField(
model_name='businfo',
name='title',
field=models.CharField(max_length=255, verbose_name=b'Bus Info'),
),
migrations.AlterField(
model_name='businfo',
name='url',
field=models.CharField(max_length=255, verbose_name=b'Bus No'),
),
]
|
flexible
|
{
"blob_id": "1dec7a997b0bef3226fb17e4039b053c7a2e457e",
"index": 9045,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('mybus', '0007_auto_20160104_0053')]\n operations = [migrations.RemoveField(model_name='businfo', name=\n 'description'), migrations.AlterField(model_name='businfo', name=\n 'title', field=models.CharField(max_length=255, verbose_name=\n b'Bus Info')), migrations.AlterField(model_name='businfo', name=\n 'url', field=models.CharField(max_length=255, verbose_name=b'Bus No'))]\n",
"step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('mybus', '0007_auto_20160104_0053')]\n operations = [migrations.RemoveField(model_name='businfo', name=\n 'description'), migrations.AlterField(model_name='businfo', name=\n 'title', field=models.CharField(max_length=255, verbose_name=\n b'Bus Info')), migrations.AlterField(model_name='businfo', name=\n 'url', field=models.CharField(max_length=255, verbose_name=b'Bus No'))]\n",
"step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.9.1 on 2016-01-03 19:28\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('mybus', '0007_auto_20160104_0053'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='businfo',\n name='description',\n ),\n migrations.AlterField(\n model_name='businfo',\n name='title',\n field=models.CharField(max_length=255, verbose_name=b'Bus Info'),\n ),\n migrations.AlterField(\n model_name='businfo',\n name='url',\n field=models.CharField(max_length=255, verbose_name=b'Bus No'),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
######################################################################
#
# Write something here to recognize your own file
#
# Copyright: MIT License
#
######################################################################
def multiply(value):
return value * 5
if __name__ == "__main__":
a = [0, 1, 2, 3, 4, 5]
new_empty_list = []
print(a)
for item in a:
b = multiply(item)
new_empty_list.append(b)
print(b)
print(new_empty_list)
|
normal
|
{
"blob_id": "0778b25363d50e699edf48b92f1104ab57c03172",
"index": 2015,
"step-1": "<mask token>\n",
"step-2": "def multiply(value):\n return value * 5\n\n\n<mask token>\n",
"step-3": "def multiply(value):\n return value * 5\n\n\nif __name__ == '__main__':\n a = [0, 1, 2, 3, 4, 5]\n new_empty_list = []\n print(a)\n for item in a:\n b = multiply(item)\n new_empty_list.append(b)\n print(b)\n print(new_empty_list)\n",
"step-4": "######################################################################\n#\n# Write something here to recognize your own file\n# \n# Copyright: MIT License\n#\n######################################################################\n\ndef multiply(value):\n return value * 5\n\nif __name__ == \"__main__\":\n a = [0, 1, 2, 3, 4, 5]\n new_empty_list = []\n \n print(a)\n \n for item in a:\n b = multiply(item)\n new_empty_list.append(b)\n print(b)\n \n print(new_empty_list)",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import graphene
from django.core.exceptions import ValidationError
from ....app import models
from ....app.error_codes import AppErrorCode
from ....permission.enums import AppPermission, get_permissions
from ....webhook.event_types import WebhookEventAsyncType
from ...account.utils import can_manage_app
from ...core.mutations import ModelMutation
from ...core.types import AppError
from ...core.utils import WebhookEventInfo
from ...plugins.dataloaders import get_plugin_manager_promise
from ...utils import get_user_or_app_from_context, requestor_is_superuser
from ..types import App
from ..utils import ensure_can_manage_permissions
from .app_create import AppInput
class AppUpdate(ModelMutation):
class Arguments:
id = graphene.ID(description="ID of an app to update.", required=True)
input = AppInput(
required=True,
description="Fields required to update an existing app.",
)
class Meta:
description = "Updates an existing app."
model = models.App
object_type = App
permissions = (AppPermission.MANAGE_APPS,)
error_type_class = AppError
error_type_field = "app_errors"
webhook_events_info = [
WebhookEventInfo(
type=WebhookEventAsyncType.APP_UPDATED,
description="An app was updated.",
),
]
@classmethod
def clean_input(cls, info, instance, data, **kwargs):
cleaned_input = super().clean_input(info, instance, data, **kwargs)
requestor = get_user_or_app_from_context(info.context)
if not requestor_is_superuser(requestor) and not can_manage_app(
requestor, instance
):
msg = "You can't manage this app."
code = AppErrorCode.OUT_OF_SCOPE_APP.value
raise ValidationError({"id": ValidationError(msg, code=code)})
# clean and prepare permissions
if "permissions" in cleaned_input:
permissions = cleaned_input.pop("permissions")
cleaned_input["permissions"] = get_permissions(permissions)
ensure_can_manage_permissions(requestor, permissions)
return cleaned_input
@classmethod
def post_save_action(cls, info, instance, cleaned_input):
manager = get_plugin_manager_promise(info.context).get()
cls.call_event(manager.app_updated, instance)
|
normal
|
{
"blob_id": "972a063bab35926472be592e6a17d450034fbf37",
"index": 4745,
"step-1": "<mask token>\n\n\nclass AppUpdate(ModelMutation):\n\n\n class Arguments:\n id = graphene.ID(description='ID of an app to update.', required=True)\n input = AppInput(required=True, description=\n 'Fields required to update an existing app.')\n\n\n class Meta:\n description = 'Updates an existing app.'\n model = models.App\n object_type = App\n permissions = AppPermission.MANAGE_APPS,\n error_type_class = AppError\n error_type_field = 'app_errors'\n webhook_events_info = [WebhookEventInfo(type=WebhookEventAsyncType.\n APP_UPDATED, description='An app was updated.')]\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass AppUpdate(ModelMutation):\n\n\n class Arguments:\n id = graphene.ID(description='ID of an app to update.', required=True)\n input = AppInput(required=True, description=\n 'Fields required to update an existing app.')\n\n\n class Meta:\n description = 'Updates an existing app.'\n model = models.App\n object_type = App\n permissions = AppPermission.MANAGE_APPS,\n error_type_class = AppError\n error_type_field = 'app_errors'\n webhook_events_info = [WebhookEventInfo(type=WebhookEventAsyncType.\n APP_UPDATED, description='An app was updated.')]\n\n @classmethod\n def clean_input(cls, info, instance, data, **kwargs):\n cleaned_input = super().clean_input(info, instance, data, **kwargs)\n requestor = get_user_or_app_from_context(info.context)\n if not requestor_is_superuser(requestor) and not can_manage_app(\n requestor, instance):\n msg = \"You can't manage this app.\"\n code = AppErrorCode.OUT_OF_SCOPE_APP.value\n raise ValidationError({'id': ValidationError(msg, code=code)})\n if 'permissions' in cleaned_input:\n permissions = cleaned_input.pop('permissions')\n cleaned_input['permissions'] = get_permissions(permissions)\n ensure_can_manage_permissions(requestor, permissions)\n return cleaned_input\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass AppUpdate(ModelMutation):\n\n\n class Arguments:\n id = graphene.ID(description='ID of an app to update.', required=True)\n input = AppInput(required=True, description=\n 'Fields required to update an existing app.')\n\n\n class Meta:\n description = 'Updates an existing app.'\n model = models.App\n object_type = App\n permissions = AppPermission.MANAGE_APPS,\n error_type_class = AppError\n error_type_field = 'app_errors'\n webhook_events_info = [WebhookEventInfo(type=WebhookEventAsyncType.\n APP_UPDATED, description='An app was updated.')]\n\n @classmethod\n def clean_input(cls, info, instance, data, **kwargs):\n cleaned_input = super().clean_input(info, instance, data, **kwargs)\n requestor = get_user_or_app_from_context(info.context)\n if not requestor_is_superuser(requestor) and not can_manage_app(\n requestor, instance):\n msg = \"You can't manage this app.\"\n code = AppErrorCode.OUT_OF_SCOPE_APP.value\n raise ValidationError({'id': ValidationError(msg, code=code)})\n if 'permissions' in cleaned_input:\n permissions = cleaned_input.pop('permissions')\n cleaned_input['permissions'] = get_permissions(permissions)\n ensure_can_manage_permissions(requestor, permissions)\n return cleaned_input\n\n @classmethod\n def post_save_action(cls, info, instance, cleaned_input):\n manager = get_plugin_manager_promise(info.context).get()\n cls.call_event(manager.app_updated, instance)\n",
"step-4": "import graphene\nfrom django.core.exceptions import ValidationError\nfrom ....app import models\nfrom ....app.error_codes import AppErrorCode\nfrom ....permission.enums import AppPermission, get_permissions\nfrom ....webhook.event_types import WebhookEventAsyncType\nfrom ...account.utils import can_manage_app\nfrom ...core.mutations import ModelMutation\nfrom ...core.types import AppError\nfrom ...core.utils import WebhookEventInfo\nfrom ...plugins.dataloaders import get_plugin_manager_promise\nfrom ...utils import get_user_or_app_from_context, requestor_is_superuser\nfrom ..types import App\nfrom ..utils import ensure_can_manage_permissions\nfrom .app_create import AppInput\n\n\nclass AppUpdate(ModelMutation):\n\n\n class Arguments:\n id = graphene.ID(description='ID of an app to update.', required=True)\n input = AppInput(required=True, description=\n 'Fields required to update an existing app.')\n\n\n class Meta:\n description = 'Updates an existing app.'\n model = models.App\n object_type = App\n permissions = AppPermission.MANAGE_APPS,\n error_type_class = AppError\n error_type_field = 'app_errors'\n webhook_events_info = [WebhookEventInfo(type=WebhookEventAsyncType.\n APP_UPDATED, description='An app was updated.')]\n\n @classmethod\n def clean_input(cls, info, instance, data, **kwargs):\n cleaned_input = super().clean_input(info, instance, data, **kwargs)\n requestor = get_user_or_app_from_context(info.context)\n if not requestor_is_superuser(requestor) and not can_manage_app(\n requestor, instance):\n msg = \"You can't manage this app.\"\n code = AppErrorCode.OUT_OF_SCOPE_APP.value\n raise ValidationError({'id': ValidationError(msg, code=code)})\n if 'permissions' in cleaned_input:\n permissions = cleaned_input.pop('permissions')\n cleaned_input['permissions'] = get_permissions(permissions)\n ensure_can_manage_permissions(requestor, permissions)\n return cleaned_input\n\n @classmethod\n def post_save_action(cls, info, instance, cleaned_input):\n manager = get_plugin_manager_promise(info.context).get()\n cls.call_event(manager.app_updated, instance)\n",
"step-5": "import graphene\nfrom django.core.exceptions import ValidationError\n\nfrom ....app import models\nfrom ....app.error_codes import AppErrorCode\nfrom ....permission.enums import AppPermission, get_permissions\nfrom ....webhook.event_types import WebhookEventAsyncType\nfrom ...account.utils import can_manage_app\nfrom ...core.mutations import ModelMutation\nfrom ...core.types import AppError\nfrom ...core.utils import WebhookEventInfo\nfrom ...plugins.dataloaders import get_plugin_manager_promise\nfrom ...utils import get_user_or_app_from_context, requestor_is_superuser\nfrom ..types import App\nfrom ..utils import ensure_can_manage_permissions\nfrom .app_create import AppInput\n\n\nclass AppUpdate(ModelMutation):\n class Arguments:\n id = graphene.ID(description=\"ID of an app to update.\", required=True)\n input = AppInput(\n required=True,\n description=\"Fields required to update an existing app.\",\n )\n\n class Meta:\n description = \"Updates an existing app.\"\n model = models.App\n object_type = App\n permissions = (AppPermission.MANAGE_APPS,)\n error_type_class = AppError\n error_type_field = \"app_errors\"\n webhook_events_info = [\n WebhookEventInfo(\n type=WebhookEventAsyncType.APP_UPDATED,\n description=\"An app was updated.\",\n ),\n ]\n\n @classmethod\n def clean_input(cls, info, instance, data, **kwargs):\n cleaned_input = super().clean_input(info, instance, data, **kwargs)\n requestor = get_user_or_app_from_context(info.context)\n if not requestor_is_superuser(requestor) and not can_manage_app(\n requestor, instance\n ):\n msg = \"You can't manage this app.\"\n code = AppErrorCode.OUT_OF_SCOPE_APP.value\n raise ValidationError({\"id\": ValidationError(msg, code=code)})\n\n # clean and prepare permissions\n if \"permissions\" in cleaned_input:\n permissions = cleaned_input.pop(\"permissions\")\n cleaned_input[\"permissions\"] = get_permissions(permissions)\n ensure_can_manage_permissions(requestor, permissions)\n return cleaned_input\n\n @classmethod\n def post_save_action(cls, info, instance, cleaned_input):\n manager = get_plugin_manager_promise(info.context).get()\n cls.call_event(manager.app_updated, instance)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# In[2]:
import os
GFE_PATH = "C:\Haely\MS2017\sem2\EE 259\Project\grammatical_facial_expression"
def load_a_affirm_data(gfe_path=GFE_PATH):
csv_patha = os.path.join(gfe_path, "a_affirmative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_patha)
def load_a_affirm_target(gfe_path=GFE_PATH):
csv_targeta = os.path.join(gfe_path, "a_affirmative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targeta)
def load_a_cond_data(gfe_path=GFE_PATH):
csv_pathc = os.path.join(gfe_path, "a_conditional_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathc)
def load_a_cond_target(gfe_path=GFE_PATH):
csv_targetc = os.path.join(gfe_path, "a_conditional_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetc)
def load_a_doubtq_data(gfe_path=GFE_PATH):
csv_pathd = os.path.join(gfe_path, "a_doubt_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathd)
def load_a_doubtq_target(gfe_path=GFE_PATH):
csv_targetd = os.path.join(gfe_path, "a_doubts_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetd)
def load_a_emphasis_data(gfe_path=GFE_PATH):
csv_pathe = os.path.join(gfe_path, "a_emphasis_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathe)
def load_a_emphasis_target(gfe_path=GFE_PATH):
csv_targete = os.path.join(gfe_path, "a_emphasis_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targete)
def load_a_neg_data(gfe_path=GFE_PATH):
csv_pathn = os.path.join(gfe_path, "a_negative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathn)
def load_a_neg_target(gfe_path=GFE_PATH):
csv_targetn = os.path.join(gfe_path, "a_negative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetn)
def load_a_rel_data(gfe_path=GFE_PATH):
csv_pathr = os.path.join(gfe_path, "a_relative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathr)
def load_a_rel_target(gfe_path=GFE_PATH):
csv_targetr = os.path.join(gfe_path, "a_relative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetr)
def load_a_topics_data(gfe_path=GFE_PATH):
csv_patht = os.path.join(gfe_path, "a_topics_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_patht)
def load_a_topics_target(gfe_path=GFE_PATH):
csv_targett = os.path.join(gfe_path, "a_topics_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targett)
def load_a_wh_data(gfe_path=GFE_PATH):
csv_pathw = os.path.join(gfe_path, "a_wh_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathw)
def load_a_wh_target(gfe_path=GFE_PATH):
csv_targetw = os.path.join(gfe_path, "a_wh_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetw)
def load_a_yn_data(gfe_path=GFE_PATH):
csv_pathy = os.path.join(gfe_path, "a_yn_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathy)
def load_a_yn_target(gfe_path=GFE_PATH):
csv_targety = os.path.join(gfe_path, "a_yn_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targety)
# In[3]:
def load_b_affirm_data(gfe_path=GFE_PATH):
csv_pathab = os.path.join(gfe_path, "b_affirmative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathab)
def load_b_affirm_target(gfe_path=GFE_PATH):
csv_targetab = os.path.join(gfe_path, "b_affirmative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetab)
def load_b_cond_data(gfe_path=GFE_PATH):
csv_pathcb = os.path.join(gfe_path, "b_conditional_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathcb)
def load_b_cond_target(gfe_path=GFE_PATH):
csv_targetcb = os.path.join(gfe_path, "b_conditional_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetcb)
def load_b_doubtq_data(gfe_path=GFE_PATH):
csv_pathdb = os.path.join(gfe_path, "b_doubt_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathdb)
def load_b_doubtq_target(gfe_path=GFE_PATH):
csv_targetdb = os.path.join(gfe_path, "b_doubt_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetdb)
def load_b_emphasis_data(gfe_path=GFE_PATH):
csv_patheb = os.path.join(gfe_path, "b_emphasis_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_patheb)
def load_b_emphasis_target(gfe_path=GFE_PATH):
csv_targeteb = os.path.join(gfe_path, "b_emphasis_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targeteb)
def load_b_neg_data(gfe_path=GFE_PATH):
csv_pathnb = os.path.join(gfe_path, "b_negative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathnb)
def load_b_neg_target(gfe_path=GFE_PATH):
csv_targetnb = os.path.join(gfe_path, "b_negative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetnb)
def load_b_rel_data(gfe_path=GFE_PATH):
csv_pathrb = os.path.join(gfe_path, "b_relative_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathrb)
def load_b_rel_target(gfe_path=GFE_PATH):
csv_targetrb = os.path.join(gfe_path, "b_relative_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetrb)
def load_b_topics_data(gfe_path=GFE_PATH):
csv_pathtb = os.path.join(gfe_path, "b_topics_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathtb)
def load_b_topics_target(gfe_path=GFE_PATH):
csv_targettb = os.path.join(gfe_path, "b_topics_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targettb)
def load_b_wh_data(gfe_path=GFE_PATH):
csv_pathwb = os.path.join(gfe_path, "b_wh_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathwb)
def load_b_wh_target(gfe_path=GFE_PATH):
csv_targetwb = os.path.join(gfe_path, "b_wh_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetwb)
def load_b_yn_data(gfe_path=GFE_PATH):
csv_pathyb = os.path.join(gfe_path, "b_yn_question_datapoints.csv")
print(gfe_path)
return pd.read_csv(csv_pathyb)
def load_b_yn_target(gfe_path=GFE_PATH):
csv_targetyb = os.path.join(gfe_path, "b_yn_question_targets.csv")
print(gfe_path)
return pd.read_csv(csv_targetyb)
# In[4]:
affirmda = load_a_affirm_data()
affirmta = load_a_affirm_target()
condda = load_a_cond_data()
condta = load_a_cond_target()
doubtqda = load_a_doubtq_data()
doubtqta = load_a_doubtq_target()
emphda = load_a_emphasis_data()
emphta = load_a_emphasis_target()
negda = load_a_neg_data()
negta = load_a_neg_target()
relda = load_a_rel_data()
relta = load_a_rel_target()
topicsda = load_a_topics_data()
topicsta = load_a_topics_target()
whda = load_a_wh_data()
whta = load_a_wh_target()
ynda = load_a_yn_data()
ynta = load_a_yn_target()
# In[5]:
affirmdb = load_b_affirm_data()
affirmtb = load_b_affirm_target()
conddb = load_b_cond_data()
condtb = load_b_cond_target()
doubtqdb = load_b_doubtq_data()
doubtqtb = load_b_doubtq_target()
emphdb = load_b_emphasis_data()
emphtb = load_b_emphasis_target()
negdb = load_b_neg_data()
negtb = load_b_neg_target()
reldb = load_b_rel_data()
reltb = load_b_rel_target()
topicsdb = load_b_topics_data()
topicstb = load_b_topics_target()
whdb = load_b_wh_data()
whtb = load_b_wh_target()
yndb = load_b_yn_data()
yntb = load_b_yn_target()
# In[8]:
users_combine_affirmd = pd.concat([affirmda, affirmdb],ignore_index=True)
affirm_y = pd.concat([affirmta,affirmtb],ignore_index=True)
users_combine_condd = pd.concat([condda, conddb],ignore_index=True)
cond_y = pd.concat([condta, condtb],ignore_index=True)
users_combine_doubtqd = pd.concat([doubtqda, doubtqdb],ignore_index=True)
doubtq_y = pd.concat([doubtqta, doubtqtb],ignore_index=True)
users_combine_emphd = pd.concat([emphda, emphdb],ignore_index=True)
emph_y = pd.concat([emphta, emphtb],ignore_index=True)
users_combine_negd = pd.concat([negda, negdb],ignore_index=True)
neg_y = pd.concat([negta, negtb],ignore_index=True)
users_combine_reld = pd.concat([relda, reldb],ignore_index=True)
rel_y = pd.concat([relta, reltb],ignore_index=True)
users_combine_topicsd = pd.concat([topicsda, topicsdb],ignore_index=True)
topics_y = pd.concat([topicsta, topicstb],ignore_index=True)
users_combine_whd = pd.concat([whda, whdb],ignore_index=True)
wh_y = pd.concat([whta, whtb],ignore_index=True)
users_combine_ynd = pd.concat([ynda, yndb],ignore_index=True)
yn_y = pd.concat([ynta, yntb],ignore_index=True)
# In[11]:
users_combine_affirmd['affirm_y']=affirm_y
affirm_y.drop([10])
# In[12]:
users_combine_condd['cond_y']=cond_y
cond_y.drop([10])
# In[13]:
users_combine_doubtqd['doubtq_y']=doubtq_y
doubtq_y.drop([10])
# In[14]:
users_combine_emphd['emph_y']=emph_y
emph_y.drop([10])
# In[15]:
users_combine_negd['neg_y']=neg_y
neg_y.drop([10])
# In[16]:
users_combine_reld['rel_y']=rel_y
rel_y.drop([10])
# In[17]:
users_combine_topicsd['topics_y']=topics_y
topics_y.drop([10])
# In[18]:
users_combine_whd['wh_y']=wh_y
wh_y.drop([10])
# In[19]:
users_combine_ynd['yn_y']=yn_y
yn_y.drop([10])
# In[22]:
from sklearn.model_selection import train_test_split
ya=users_combine_affirmd['affirm_y']
Xa_train,Xa_test,ya_train,ya_test = train_test_split(users_combine_affirmd.iloc[:,1:],ya,stratify=ya)
yc=users_combine_condd['cond_y']
Xc_train,Xc_test,yc_train,yc_test = train_test_split(users_combine_condd.iloc[:,1:],yc,stratify=yc)
yd=users_combine_doubtqd['doubtq_y']
Xd_train,Xd_test,yd_train,yd_test = train_test_split(users_combine_doubtqd.iloc[:,1:],yd,stratify=yd)
ye=users_combine_emphd['emph_y']
Xe_train,Xe_test,ye_train,ye_test = train_test_split(users_combine_emphd.iloc[:,1:],ye,stratify=ye)
yn=users_combine_negd['neg_y']
Xn_train,Xn_test,yn_train,yn_test = train_test_split(users_combine_negd.iloc[:,1:],yn,stratify=yn)
yr=users_combine_reld['rel_y']
Xr_train,Xr_test,yr_train,yr_test = train_test_split(users_combine_reld.iloc[:,1:],yr,stratify=yr)
yt=users_combine_topicsd['topics_y']
Xt_train,Xt_test,yt_train,yt_test = train_test_split(users_combine_topicsd.iloc[:,1:],yt,stratify=yt)
yw=users_combine_whd['wh_y']
Xw_train,Xw_test,yw_train,yw_test = train_test_split(users_combine_whd.iloc[:,1:],yw,stratify=yw)
yy=users_combine_ynd['yn_y']
Xy_train,Xy_test,yy_train,yy_test = train_test_split(users_combine_ynd.iloc[:,1:],yy,stratify=yy)
# In[25]:
from sklearn.preprocessing import scale
from scipy import stats
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
lda_clf = LDA(solver='lsqr',store_covariance=True)
lda_clf.fit(Xa_train,ya_train)
ya_predicted = lda_clf.predict(Xa_test)
print('\n The error rate of the LDA model for affirm is {0:.2f}% '.format(100*np.mean(ya_predicted!=ya_test)))
lda_clf.fit(Xc_train,yc_train)
yc_predicted = lda_clf.predict(Xc_test)
print('\n The error rate of the LDA model for conditional is {0:.2f}% '.format(100*np.mean(yc_predicted!=yc_test)))
lda_clf.fit(Xd_train,yd_train)
yd_predicted = lda_clf.predict(Xd_test)
print('\n The error rate of the LDA model for doubt questions is {0:.2f}% '.format(100*np.mean(yd_predicted!=yd_test)))
lda_clf.fit(Xe_train,ye_train)
ye_predicted = lda_clf.predict(Xe_test)
print('\n The error rate of the LDA model for emphasis is {0:.2f}% '.format(100*np.mean(ye_predicted!=ye_test)))
lda_clf.fit(Xn_train,yn_train)
yn_predicted = lda_clf.predict(Xn_test)
print('\n The error rate of the LDA model for negative is {0:.2f}% '.format(100*np.mean(yn_predicted!=yn_test)))
lda_clf.fit(Xr_train,yr_train)
yr_predicted = lda_clf.predict(Xr_test)
print('\n The error rate of the LDA model for relative is {0:.2f}% '.format(100*np.mean(yr_predicted!=yr_test)))
lda_clf.fit(Xt_train,yt_train)
yt_predicted = lda_clf.predict(Xt_test)
print('\n The error rate of the LDA model for topics is {0:.2f}% '.format(100*np.mean(yt_predicted!=yt_test)))
lda_clf.fit(Xw_train,yw_train)
yw_predicted = lda_clf.predict(Xw_test)
print('\n The error rate of the LDA model for wh questions is {0:.2f}% '.format(100*np.mean(yw_predicted!=yw_test)))
lda_clf.fit(Xy_train,yy_train)
yy_predicted = lda_clf.predict(Xy_test)
print('\n The error rate of the LDA model for yes or no is {0:.2f}% '.format(100*np.mean(yy_predicted!=yy_test)))
|
normal
|
{
"blob_id": "2fb8bce3a64787dbaf5a3bb3da53f70005048467",
"index": 4104,
"step-1": "<mask token>\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\n<mask token>\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\n<mask token>\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\n<mask token>\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\n<mask token>\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, 'a_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patha)\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\n<mask token>\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, 'a_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, 'a_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathy)\n\n\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, 'a_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, 'b_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, 'b_emphasis_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patheb)\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return 
pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, 'b_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\n\n\n<mask token>\n\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, 'b_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, 'a_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patha)\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\ndef load_a_doubtq_data(gfe_path=GFE_PATH):\n csv_pathd = os.path.join(gfe_path, 'a_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathd)\n\n\n<mask token>\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, 'a_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, 'a_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathy)\n\n\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, 'a_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, 'b_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, 'b_emphasis_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patheb)\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n 
return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, 'b_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\n\n\ndef load_b_rel_target(gfe_path=GFE_PATH):\n csv_targetrb = os.path.join(gfe_path, 'b_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetrb)\n\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, 'b_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, 'a_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patha)\n\n\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, 'a_affirmative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, 'a_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathc)\n\n\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, 'a_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\n\ndef load_a_doubtq_data(gfe_path=GFE_PATH):\n csv_pathd = os.path.join(gfe_path, 'a_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathd)\n\n\n<mask token>\n\n\ndef load_a_emphasis_data(gfe_path=GFE_PATH):\n csv_pathe = os.path.join(gfe_path, 'a_emphasis_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathe)\n\n\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, 'a_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, 'a_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathn)\n\n\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, 'a_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, 'a_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathr)\n\n\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, 'a_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, 'a_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_patht)\n\n\n<mask token>\n\n\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, 'a_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, 'a_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathy)\n\n\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, 'a_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, 'b_affirmative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathab)\n\n\n<mask token>\n\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, 'b_conditional_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\n\n\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, 'b_conditional_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, 'b_doubt_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\n\n\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, 'b_doubt_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, 'b_emphasis_datapoints.csv')\n print(gfe_path)\n 
return pd.read_csv(csv_patheb)\n\n\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, 'b_emphasis_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, 'b_negative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\n\n\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, 'b_negative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, 'b_relative_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\n\n\ndef load_b_rel_target(gfe_path=GFE_PATH):\n csv_targetrb = os.path.join(gfe_path, 'b_relative_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetrb)\n\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, 'b_topics_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\n\n\n<mask token>\n\n\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, 'b_wh_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, 'b_yn_question_datapoints.csv')\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\n\n\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, 'b_yn_question_targets.csv')\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n<mask token>\n",
"step-5": "\n# coding: utf-8\n\n# In[1]:\n\n\nimport pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\n\n\n# In[2]:\n\n\nimport os\nGFE_PATH = \"C:\\Haely\\MS2017\\sem2\\EE 259\\Project\\grammatical_facial_expression\"\n\ndef load_a_affirm_data(gfe_path=GFE_PATH):\n csv_patha = os.path.join(gfe_path, \"a_affirmative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_patha)\ndef load_a_affirm_target(gfe_path=GFE_PATH):\n csv_targeta = os.path.join(gfe_path, \"a_affirmative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targeta)\n\ndef load_a_cond_data(gfe_path=GFE_PATH):\n csv_pathc = os.path.join(gfe_path, \"a_conditional_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathc)\ndef load_a_cond_target(gfe_path=GFE_PATH):\n csv_targetc = os.path.join(gfe_path, \"a_conditional_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetc)\n\ndef load_a_doubtq_data(gfe_path=GFE_PATH):\n csv_pathd = os.path.join(gfe_path, \"a_doubt_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathd)\ndef load_a_doubtq_target(gfe_path=GFE_PATH):\n csv_targetd = os.path.join(gfe_path, \"a_doubts_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetd)\n\ndef load_a_emphasis_data(gfe_path=GFE_PATH):\n csv_pathe = os.path.join(gfe_path, \"a_emphasis_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathe)\ndef load_a_emphasis_target(gfe_path=GFE_PATH):\n csv_targete = os.path.join(gfe_path, \"a_emphasis_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targete)\n\ndef load_a_neg_data(gfe_path=GFE_PATH):\n csv_pathn = os.path.join(gfe_path, \"a_negative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathn)\ndef load_a_neg_target(gfe_path=GFE_PATH):\n csv_targetn = os.path.join(gfe_path, \"a_negative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetn)\n\ndef load_a_rel_data(gfe_path=GFE_PATH):\n csv_pathr = os.path.join(gfe_path, \"a_relative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathr)\ndef load_a_rel_target(gfe_path=GFE_PATH):\n csv_targetr = os.path.join(gfe_path, \"a_relative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetr)\n\ndef load_a_topics_data(gfe_path=GFE_PATH):\n csv_patht = os.path.join(gfe_path, \"a_topics_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_patht)\ndef load_a_topics_target(gfe_path=GFE_PATH):\n csv_targett = os.path.join(gfe_path, \"a_topics_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targett)\n\ndef load_a_wh_data(gfe_path=GFE_PATH):\n csv_pathw = os.path.join(gfe_path, \"a_wh_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathw)\ndef load_a_wh_target(gfe_path=GFE_PATH):\n csv_targetw = os.path.join(gfe_path, \"a_wh_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetw)\n\ndef load_a_yn_data(gfe_path=GFE_PATH):\n csv_pathy = os.path.join(gfe_path, \"a_yn_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathy)\ndef load_a_yn_target(gfe_path=GFE_PATH):\n csv_targety = os.path.join(gfe_path, \"a_yn_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targety)\n\n\n# In[3]:\n\n\ndef load_b_affirm_data(gfe_path=GFE_PATH):\n csv_pathab = os.path.join(gfe_path, \"b_affirmative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathab)\ndef load_b_affirm_target(gfe_path=GFE_PATH):\n csv_targetab = os.path.join(gfe_path, \"b_affirmative_targets.csv\")\n print(gfe_path)\n return 
pd.read_csv(csv_targetab)\n\ndef load_b_cond_data(gfe_path=GFE_PATH):\n csv_pathcb = os.path.join(gfe_path, \"b_conditional_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathcb)\ndef load_b_cond_target(gfe_path=GFE_PATH):\n csv_targetcb = os.path.join(gfe_path, \"b_conditional_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetcb)\n\ndef load_b_doubtq_data(gfe_path=GFE_PATH):\n csv_pathdb = os.path.join(gfe_path, \"b_doubt_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathdb)\ndef load_b_doubtq_target(gfe_path=GFE_PATH):\n csv_targetdb = os.path.join(gfe_path, \"b_doubt_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetdb)\n\ndef load_b_emphasis_data(gfe_path=GFE_PATH):\n csv_patheb = os.path.join(gfe_path, \"b_emphasis_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_patheb)\ndef load_b_emphasis_target(gfe_path=GFE_PATH):\n csv_targeteb = os.path.join(gfe_path, \"b_emphasis_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targeteb)\n\ndef load_b_neg_data(gfe_path=GFE_PATH):\n csv_pathnb = os.path.join(gfe_path, \"b_negative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathnb)\ndef load_b_neg_target(gfe_path=GFE_PATH):\n csv_targetnb = os.path.join(gfe_path, \"b_negative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetnb)\n\ndef load_b_rel_data(gfe_path=GFE_PATH):\n csv_pathrb = os.path.join(gfe_path, \"b_relative_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathrb)\ndef load_b_rel_target(gfe_path=GFE_PATH):\n csv_targetrb = os.path.join(gfe_path, \"b_relative_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetrb)\n\ndef load_b_topics_data(gfe_path=GFE_PATH):\n csv_pathtb = os.path.join(gfe_path, \"b_topics_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathtb)\ndef load_b_topics_target(gfe_path=GFE_PATH):\n csv_targettb = os.path.join(gfe_path, \"b_topics_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targettb)\n\ndef load_b_wh_data(gfe_path=GFE_PATH):\n csv_pathwb = os.path.join(gfe_path, \"b_wh_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathwb)\ndef load_b_wh_target(gfe_path=GFE_PATH):\n csv_targetwb = os.path.join(gfe_path, \"b_wh_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetwb)\n\ndef load_b_yn_data(gfe_path=GFE_PATH):\n csv_pathyb = os.path.join(gfe_path, \"b_yn_question_datapoints.csv\")\n print(gfe_path)\n return pd.read_csv(csv_pathyb)\ndef load_b_yn_target(gfe_path=GFE_PATH):\n csv_targetyb = os.path.join(gfe_path, \"b_yn_question_targets.csv\")\n print(gfe_path)\n return pd.read_csv(csv_targetyb)\n\n\n# In[4]:\n\n\naffirmda = load_a_affirm_data()\naffirmta = load_a_affirm_target()\n\ncondda = load_a_cond_data()\ncondta = load_a_cond_target()\n\ndoubtqda = load_a_doubtq_data()\ndoubtqta = load_a_doubtq_target()\n\nemphda = load_a_emphasis_data()\nemphta = load_a_emphasis_target()\n\nnegda = load_a_neg_data()\nnegta = load_a_neg_target()\n\nrelda = load_a_rel_data()\nrelta = load_a_rel_target()\n\ntopicsda = load_a_topics_data()\ntopicsta = load_a_topics_target()\n\nwhda = load_a_wh_data()\nwhta = load_a_wh_target()\n\nynda = load_a_yn_data()\nynta = load_a_yn_target()\n\n\n# In[5]:\n\n\naffirmdb = load_b_affirm_data()\naffirmtb = load_b_affirm_target()\n\nconddb = load_b_cond_data()\ncondtb = load_b_cond_target()\n\ndoubtqdb = load_b_doubtq_data()\ndoubtqtb = load_b_doubtq_target()\n\nemphdb = load_b_emphasis_data()\nemphtb = 
load_b_emphasis_target()\n\nnegdb = load_b_neg_data()\nnegtb = load_b_neg_target()\n\nreldb = load_b_rel_data()\nreltb = load_b_rel_target()\n\ntopicsdb = load_b_topics_data()\ntopicstb = load_b_topics_target()\n\nwhdb = load_b_wh_data()\nwhtb = load_b_wh_target()\n\nyndb = load_b_yn_data()\nyntb = load_b_yn_target()\n\n\n# In[8]:\n\n\nusers_combine_affirmd = pd.concat([affirmda, affirmdb],ignore_index=True)\naffirm_y = pd.concat([affirmta,affirmtb],ignore_index=True)\n\nusers_combine_condd = pd.concat([condda, conddb],ignore_index=True)\ncond_y = pd.concat([condta, condtb],ignore_index=True)\n\nusers_combine_doubtqd = pd.concat([doubtqda, doubtqdb],ignore_index=True)\ndoubtq_y = pd.concat([doubtqta, doubtqtb],ignore_index=True)\n\nusers_combine_emphd = pd.concat([emphda, emphdb],ignore_index=True)\nemph_y = pd.concat([emphta, emphtb],ignore_index=True)\n\nusers_combine_negd = pd.concat([negda, negdb],ignore_index=True)\nneg_y = pd.concat([negta, negtb],ignore_index=True)\n\nusers_combine_reld = pd.concat([relda, reldb],ignore_index=True)\nrel_y = pd.concat([relta, reltb],ignore_index=True)\n\nusers_combine_topicsd = pd.concat([topicsda, topicsdb],ignore_index=True)\ntopics_y = pd.concat([topicsta, topicstb],ignore_index=True)\n\nusers_combine_whd = pd.concat([whda, whdb],ignore_index=True)\nwh_y = pd.concat([whta, whtb],ignore_index=True)\n\nusers_combine_ynd = pd.concat([ynda, yndb],ignore_index=True)\nyn_y = pd.concat([ynta, yntb],ignore_index=True)\n\n\n# In[11]:\n\n\nusers_combine_affirmd['affirm_y']=affirm_y\naffirm_y.drop([10]) \n\n\n\n# In[12]:\n\n\nusers_combine_condd['cond_y']=cond_y\ncond_y.drop([10]) \n\n\n# In[13]:\n\n\nusers_combine_doubtqd['doubtq_y']=doubtq_y\ndoubtq_y.drop([10]) \n\n\n# In[14]:\n\n\nusers_combine_emphd['emph_y']=emph_y\nemph_y.drop([10]) \n\n\n# In[15]:\n\n\nusers_combine_negd['neg_y']=neg_y\nneg_y.drop([10]) \n\n\n# In[16]:\n\n\nusers_combine_reld['rel_y']=rel_y\nrel_y.drop([10]) \n\n\n# In[17]:\n\n\nusers_combine_topicsd['topics_y']=topics_y\ntopics_y.drop([10]) \n\n\n# In[18]:\n\n\nusers_combine_whd['wh_y']=wh_y\nwh_y.drop([10]) \n\n\n# In[19]:\n\n\nusers_combine_ynd['yn_y']=yn_y\nyn_y.drop([10]) \n\n\n# In[22]:\n\n\nfrom sklearn.model_selection import train_test_split\nya=users_combine_affirmd['affirm_y']\nXa_train,Xa_test,ya_train,ya_test = train_test_split(users_combine_affirmd.iloc[:,1:],ya,stratify=ya)\n\nyc=users_combine_condd['cond_y']\nXc_train,Xc_test,yc_train,yc_test = train_test_split(users_combine_condd.iloc[:,1:],yc,stratify=yc)\n\nyd=users_combine_doubtqd['doubtq_y']\nXd_train,Xd_test,yd_train,yd_test = train_test_split(users_combine_doubtqd.iloc[:,1:],yd,stratify=yd)\n\nye=users_combine_emphd['emph_y']\nXe_train,Xe_test,ye_train,ye_test = train_test_split(users_combine_emphd.iloc[:,1:],ye,stratify=ye)\n\nyn=users_combine_negd['neg_y']\nXn_train,Xn_test,yn_train,yn_test = train_test_split(users_combine_negd.iloc[:,1:],yn,stratify=yn)\n\nyr=users_combine_reld['rel_y']\nXr_train,Xr_test,yr_train,yr_test = train_test_split(users_combine_reld.iloc[:,1:],yr,stratify=yr)\n\nyt=users_combine_topicsd['topics_y']\nXt_train,Xt_test,yt_train,yt_test = train_test_split(users_combine_topicsd.iloc[:,1:],yt,stratify=yt)\n\nyw=users_combine_whd['wh_y']\nXw_train,Xw_test,yw_train,yw_test = train_test_split(users_combine_whd.iloc[:,1:],yw,stratify=yw)\n\nyy=users_combine_ynd['yn_y']\nXy_train,Xy_test,yy_train,yy_test = train_test_split(users_combine_ynd.iloc[:,1:],yy,stratify=yy)\n\n\n\n# In[25]:\n\n\nfrom sklearn.preprocessing import scale\nfrom scipy 
import stats\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA\nlda_clf = LDA(solver='lsqr',store_covariance=True)\n\nlda_clf.fit(Xa_train,ya_train)\nya_predicted = lda_clf.predict(Xa_test)\nprint('\\n The error rate of the LDA model for affirm is {0:.2f}% '.format(100*np.mean(ya_predicted!=ya_test)))\n\nlda_clf.fit(Xc_train,yc_train)\nyc_predicted = lda_clf.predict(Xc_test)\nprint('\\n The error rate of the LDA model for conditional is {0:.2f}% '.format(100*np.mean(yc_predicted!=yc_test)))\n\nlda_clf.fit(Xd_train,yd_train)\nyd_predicted = lda_clf.predict(Xd_test)\nprint('\\n The error rate of the LDA model for doubt questions is {0:.2f}% '.format(100*np.mean(yd_predicted!=yd_test)))\n\nlda_clf.fit(Xe_train,ye_train)\nye_predicted = lda_clf.predict(Xe_test)\nprint('\\n The error rate of the LDA model for emphasis is {0:.2f}% '.format(100*np.mean(ye_predicted!=ye_test)))\n\nlda_clf.fit(Xn_train,yn_train)\nyn_predicted = lda_clf.predict(Xn_test)\nprint('\\n The error rate of the LDA model for negative is {0:.2f}% '.format(100*np.mean(yn_predicted!=yn_test)))\n\nlda_clf.fit(Xr_train,yr_train)\nyr_predicted = lda_clf.predict(Xr_test)\nprint('\\n The error rate of the LDA model for relativr is {0:.2f}% '.format(100*np.mean(yr_predicted!=yr_test)))\n\nlda_clf.fit(Xt_train,yt_train)\nyt_predicted = lda_clf.predict(Xt_test)\nprint('\\n The error rate of the LDA model for topics is {0:.2f}% '.format(100*np.mean(yt_predicted!=yt_test)))\n\nlda_clf.fit(Xw_train,yw_train)\nyw_predicted = lda_clf.predict(Xw_test)\nprint('\\n The error rate of the LDA model for wh questions is {0:.2f}% '.format(100*np.mean(yw_predicted!=yw_test)))\n\nlda_clf.fit(Xy_train,yy_train)\nyy_predicted = lda_clf.predict(Xy_test)\nprint('\\n The error rate of the LDA model for yes or no is {0:.2f}% '.format(100*np.mean(yy_predicted!=yy_test)))\n\n",
"step-ids": [
19,
27,
29,
30,
40
]
}
|
[
19,
27,
29,
30,
40
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class photoForm(forms.Form):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class photoForm(forms.Form):
iso = forms.ChoiceField(label='ISO', choices=[('100', 100), ('200', 200
), ('300', 300), ('400', 400), ('500', 500), ('600', 600), ('700',
700), ('800', 800)], initial=800)
shutterspeed = forms.FloatField(label='Shutter Speed', initial=6.0)
<|reserved_special_token_1|>
from django import forms
class photoForm(forms.Form):
iso = forms.ChoiceField(label='ISO', choices=[('100', 100), ('200', 200
), ('300', 300), ('400', 400), ('500', 500), ('600', 600), ('700',
700), ('800', 800)], initial=800)
shutterspeed = forms.FloatField(label='Shutter Speed', initial=6.0)
<|reserved_special_token_1|>
from django import forms
class photoForm(forms.Form):
iso = forms.ChoiceField(label='ISO', choices=[("100", 100),
("200", 200),
("300", 300),
("400", 400),
("500", 500),
("600", 600),
("700", 700),
("800", 800)], initial=800)
shutterspeed = forms.FloatField(label='Shutter Speed', initial=6.0)
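
# --- Editorial sketch (not part of the original snippet): one hedged example of
# how photoForm might be wired into a view. The view name, template name, and
# the camera-control hook are assumptions introduced purely for illustration.
from django.shortcuts import render


def capture_settings(request):
    # Bind submitted data when the request is a POST; otherwise show an unbound form.
    form = photoForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        iso = int(form.cleaned_data['iso'])               # choice values arrive as strings, e.g. '800'
        shutterspeed = form.cleaned_data['shutterspeed']  # float, e.g. 6.0
        # hand iso / shutterspeed to whatever code actually drives the camera
    return render(request, 'capture.html', {'form': form})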
|
flexible
|
{
"blob_id": "19b55b2de3d2ed16275cef572e3518fbb2457f84",
"index": 8293,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass photoForm(forms.Form):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass photoForm(forms.Form):\n iso = forms.ChoiceField(label='ISO', choices=[('100', 100), ('200', 200\n ), ('300', 300), ('400', 400), ('500', 500), ('600', 600), ('700', \n 700), ('800', 800)], initial=800)\n shutterspeed = forms.FloatField(label='Shutter Speed', initial=6.0)\n",
"step-4": "from django import forms\n\n\nclass photoForm(forms.Form):\n iso = forms.ChoiceField(label='ISO', choices=[('100', 100), ('200', 200\n ), ('300', 300), ('400', 400), ('500', 500), ('600', 600), ('700', \n 700), ('800', 800)], initial=800)\n shutterspeed = forms.FloatField(label='Shutter Speed', initial=6.0)\n",
"step-5": "from django import forms\n\nclass photoForm(forms.Form):\n iso = forms.ChoiceField(label='ISO', choices=[(\"100\", 100),\n (\"200\", 200),\n (\"300\", 300),\n (\"400\", 400),\n (\"500\", 500),\n (\"600\", 600),\n (\"700\", 700),\n (\"800\", 800)], initial=800)\n shutterspeed = forms.FloatField(label='Shutter Speed', initial=6.0)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_convert_nc_2010_to_na_2310():
ffi_in, ffi_out = 2010, 2310
infile = os.path.join(cached_outputs, f'{ffi_in}.nc')
outfile = os.path.join(test_outputs, f'{ffi_out}_from_nc_{ffi_in}.na')
x = nappy.nc_interface.nc_to_na.NCToNA(infile, requested_ffi=ffi_out)
x.writeNAFiles(outfile, delimiter=',', float_format='%g')
<|reserved_special_token_1|>
import os
from .common import cached_outputs, data_files, test_outputs
import nappy.nc_interface.na_to_nc
import nappy.nc_interface.nc_to_na
def test_convert_nc_2010_to_na_2310():
ffi_in, ffi_out = 2010, 2310
infile = os.path.join(cached_outputs, f'{ffi_in}.nc')
outfile = os.path.join(test_outputs, f'{ffi_out}_from_nc_{ffi_in}.na')
x = nappy.nc_interface.nc_to_na.NCToNA(infile, requested_ffi=ffi_out)
x.writeNAFiles(outfile, delimiter=',', float_format='%g')
<|reserved_special_token_1|>
import os
from .common import cached_outputs, data_files, test_outputs
import nappy.nc_interface.na_to_nc
import nappy.nc_interface.nc_to_na
def test_convert_nc_2010_to_na_2310():
ffi_in, ffi_out = (2010, 2310)
infile = os.path.join(cached_outputs, f"{ffi_in}.nc")
outfile = os.path.join(test_outputs, f"{ffi_out}_from_nc_{ffi_in}.na")
# Reading: infile
x = nappy.nc_interface.nc_to_na.NCToNA(infile, requested_ffi=ffi_out)
# Writing: outfile
x.writeNAFiles(outfile, delimiter=",", float_format="%g")
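
# --- Editorial sketch (not part of the original test): the same conversion wrapped
# as a small reusable helper, assuming only the NCToNA / writeNAFiles API used above.
# The paths and the default FFI value are illustrative assumptions.
import nappy.nc_interface.nc_to_na


def convert_nc_to_na(nc_path, na_path, ffi=2310):
    # Read the NetCDF input and request the target NASA Ames file format index.
    converter = nappy.nc_interface.nc_to_na.NCToNA(nc_path, requested_ffi=ffi)
    # Write the NASA Ames output with comma delimiters and compact float formatting.
    converter.writeNAFiles(na_path, delimiter=",", float_format="%g")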
|
flexible
|
{
"blob_id": "0de657ee173b606ad61d614a6168c00fcd571a70",
"index": 74,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_convert_nc_2010_to_na_2310():\n ffi_in, ffi_out = 2010, 2310\n infile = os.path.join(cached_outputs, f'{ffi_in}.nc')\n outfile = os.path.join(test_outputs, f'{ffi_out}_from_nc_{ffi_in}.na')\n x = nappy.nc_interface.nc_to_na.NCToNA(infile, requested_ffi=ffi_out)\n x.writeNAFiles(outfile, delimiter=',', float_format='%g')\n",
"step-3": "import os\nfrom .common import cached_outputs, data_files, test_outputs\nimport nappy.nc_interface.na_to_nc\nimport nappy.nc_interface.nc_to_na\n\n\ndef test_convert_nc_2010_to_na_2310():\n ffi_in, ffi_out = 2010, 2310\n infile = os.path.join(cached_outputs, f'{ffi_in}.nc')\n outfile = os.path.join(test_outputs, f'{ffi_out}_from_nc_{ffi_in}.na')\n x = nappy.nc_interface.nc_to_na.NCToNA(infile, requested_ffi=ffi_out)\n x.writeNAFiles(outfile, delimiter=',', float_format='%g')\n",
"step-4": "import os\n\nfrom .common import cached_outputs, data_files, test_outputs\n\nimport nappy.nc_interface.na_to_nc\nimport nappy.nc_interface.nc_to_na\n\n\ndef test_convert_nc_2010_to_na_2310():\n ffi_in, ffi_out = (2010, 2310)\n\n infile = os.path.join(cached_outputs, f\"{ffi_in}.nc\")\n outfile = os.path.join(test_outputs, f\"{ffi_out}_from_nc_{ffi_in}.na\")\n\n # Reading: infile\n x = nappy.nc_interface.nc_to_na.NCToNA(infile, requested_ffi=ffi_out)\n\n # Writing: outfile\n x.writeNAFiles(outfile, delimiter=\",\", float_format=\"%g\")\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class Adam(Optimizer):
<|reserved_special_token_0|>
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08
) ->None:
from bigdl.dllib.optim.optimizer import Adam as BAdam
self.optimizer = BAdam(learningrate, learningrate_decay, beta1,
beta2, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adam':
return self.optimizer
class ParallelAdam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> pAdam = ParallelAdam()
creating: createParallelAdam
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08,
parallel_num: int=-1) ->None:
from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam
self.optimizer = BParallelAdam(learningrate, learningrate_decay,
beta1, beta2, epsilon, parallel_num, bigdl_type='float')
def get_optimizer(self) ->'optimizer.ParallelAdam':
return self.optimizer
class Ftrl(Optimizer):
"""
An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.
Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.
:param learningrate learning rate
:param learningrate_power double, must be less or equal to zero. Default is -0.5.
:param initial_accumulator_value double, the starting value for accumulators,
require zero or positive values.
:param l1_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_shrinkage_regularization_strength double, must be greater or equal to zero.
Default is zero. This differs from l2RegularizationStrength above. L2 above is a
stabilization penalty, whereas this one is a magnitude penalty.
>>> ftrl = Ftrl()
creating: createFtrl
>>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)
creating: createFtrl
"""
def __init__(self, learningrate: float=0.001, learningrate_power: float
=-0.5, initial_accumulator_value: float=0.1,
l1_regularization_strength: float=0.0, l2_regularization_strength:
float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:
from bigdl.dllib.optim.optimizer import Ftrl as BFtrl
self.optimizer = BFtrl(learningrate, learningrate_power,
initial_accumulator_value, l1_regularization_strength,
l2_regularization_strength,
l2_shrinkage_regularization_strength, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Ftrl':
return self.optimizer
class Adamax(Optimizer):
"""
An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adagrad = Adamax()
creating: createAdamax
"""
def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:
float=0.999, epsilon: float=1e-38) ->None:
from bigdl.dllib.optim.optimizer import Adamax as BAdamax
self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,
bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adamax':
return self.optimizer
class RMSprop(Optimizer):
"""
An implementation of RMSprop
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param decayrate decay rate, also called rho
:param epsilon for numerical stability
>>> adagrad = RMSprop()
creating: createRMSprop
"""
def __init__(self, learningrate: float=0.01, learningrate_decay: float=
0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:
from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop
self.optimizer = BRMSprop(learningrate, learningrate_decay,
decayrate, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.RMSprop':
return self.optimizer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Adagrad(Optimizer):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class LBFGS(Optimizer):
"""
This implementation of L-BFGS relies on a user-provided line
search function (state.lineSearch). If this function is not
provided, then a simple learningRate is used to produce fixed
size steps. Fixed size steps are much less costly than line
searches, and can be useful for stochastic problems.
The learning rate is used even when a line search is provided.
This is also useful for large-scale stochastic problems, where
opfunc is a noisy approximation of f(x). In that case, the learning
rate allows a reduction of confidence in the step size.
:param max_iter Maximum number of iterations allowed
:param max_eval Maximum number of function evaluations
:param tolfun Termination tolerance on the first-order optimality
:param tolx Termination tol on progress in terms of func/param changes
:param ncorrection
:param learningrate
:param verbose
:param linesearch A line search function
:param linesearch_options If no line search provided, then a fixed step size is used
>>> lbfgs = LBFGS()
creating: createLBFGS
"""
def __init__(self, max_iter: int=20, max_eval: float=DOUBLEMAX, tolfun:
float=1e-05, tolx: float=1e-09, ncorrection: int=100, learningrate:
float=1.0, verbose: bool=False, linesearch: Any=None,
linesearch_options: Optional[Dict[Any, Any]]=None) ->None:
from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS
self.optimizer = BLBFGS(max_iter, max_eval, tolfun, tolx,
ncorrection, learningrate, verbose, linesearch,
linesearch_options, bigdl_type='float')
def get_optimizer(self) ->'optimizer.LBFGS':
return self.optimizer
class Adadelta(Optimizer):
"""
Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701
:param decayrate interpolation parameter rho
:param epsilon for numerical stability
>>> adagrad = Adadelta()
creating: createAdadelta
"""
def __init__(self, decayrate: float=0.9, epsilon: float=1e-10) ->None:
from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta
self.optimizer = BAdadelta(decayrate, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adadelta':
return self.optimizer
class Adam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adam = Adam()
creating: createAdam
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08
) ->None:
from bigdl.dllib.optim.optimizer import Adam as BAdam
self.optimizer = BAdam(learningrate, learningrate_decay, beta1,
beta2, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adam':
return self.optimizer
class ParallelAdam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> pAdam = ParallelAdam()
creating: createParallelAdam
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08,
parallel_num: int=-1) ->None:
from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam
self.optimizer = BParallelAdam(learningrate, learningrate_decay,
beta1, beta2, epsilon, parallel_num, bigdl_type='float')
def get_optimizer(self) ->'optimizer.ParallelAdam':
return self.optimizer
class Ftrl(Optimizer):
"""
An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.
Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.
:param learningrate learning rate
:param learningrate_power double, must be less or equal to zero. Default is -0.5.
:param initial_accumulator_value double, the starting value for accumulators,
require zero or positive values.
:param l1_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_shrinkage_regularization_strength double, must be greater or equal to zero.
Default is zero. This differs from l2RegularizationStrength above. L2 above is a
stabilization penalty, whereas this one is a magnitude penalty.
>>> ftrl = Ftrl()
creating: createFtrl
>>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)
creating: createFtrl
"""
def __init__(self, learningrate: float=0.001, learningrate_power: float
=-0.5, initial_accumulator_value: float=0.1,
l1_regularization_strength: float=0.0, l2_regularization_strength:
float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:
from bigdl.dllib.optim.optimizer import Ftrl as BFtrl
self.optimizer = BFtrl(learningrate, learningrate_power,
initial_accumulator_value, l1_regularization_strength,
l2_regularization_strength,
l2_shrinkage_regularization_strength, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Ftrl':
return self.optimizer
class Adamax(Optimizer):
"""
An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adagrad = Adamax()
creating: createAdamax
"""
def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:
float=0.999, epsilon: float=1e-38) ->None:
from bigdl.dllib.optim.optimizer import Adamax as BAdamax
self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,
bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adamax':
return self.optimizer
class RMSprop(Optimizer):
"""
An implementation of RMSprop
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param decayrate decay rate, also called rho
:param epsilon for numerical stability
>>> adagrad = RMSprop()
creating: createRMSprop
"""
def __init__(self, learningrate: float=0.01, learningrate_decay: float=
0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:
from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop
self.optimizer = BRMSprop(learningrate, learningrate_decay,
decayrate, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.RMSprop':
return self.optimizer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SGD(Optimizer):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class Adagrad(Optimizer):
"""
An implementation of Adagrad. See the original paper:
http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param weightdecay weight decay
>>> adagrad = Adagrad()
creating: createAdagrad
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, weightdecay: float=0.0) ->None:
from bigdl.dllib.optim.optimizer import Adagrad as BAdagrad
self.optimizer = BAdagrad(learningrate, learningrate_decay,
weightdecay, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adagrad':
return self.optimizer
class LBFGS(Optimizer):
"""
This implementation of L-BFGS relies on a user-provided line
search function (state.lineSearch). If this function is not
provided, then a simple learningRate is used to produce fixed
size steps. Fixed size steps are much less costly than line
searches, and can be useful for stochastic problems.
The learning rate is used even when a line search is provided.
This is also useful for large-scale stochastic problems, where
opfunc is a noisy approximation of f(x). In that case, the learning
rate allows a reduction of confidence in the step size.
:param max_iter Maximum number of iterations allowed
:param max_eval Maximum number of function evaluations
:param tolfun Termination tolerance on the first-order optimality
:param tolx Termination tol on progress in terms of func/param changes
:param ncorrection
:param learningrate
:param verbose
:param linesearch A line search function
:param linesearch_options If no line search provided, then a fixed step size is used
>>> lbfgs = LBFGS()
creating: createLBFGS
"""
def __init__(self, max_iter: int=20, max_eval: float=DOUBLEMAX, tolfun:
float=1e-05, tolx: float=1e-09, ncorrection: int=100, learningrate:
float=1.0, verbose: bool=False, linesearch: Any=None,
linesearch_options: Optional[Dict[Any, Any]]=None) ->None:
from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS
self.optimizer = BLBFGS(max_iter, max_eval, tolfun, tolx,
ncorrection, learningrate, verbose, linesearch,
linesearch_options, bigdl_type='float')
def get_optimizer(self) ->'optimizer.LBFGS':
return self.optimizer
class Adadelta(Optimizer):
"""
Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701
:param decayrate interpolation parameter rho
:param epsilon for numerical stability
>>> adagrad = Adadelta()
creating: createAdadelta
"""
def __init__(self, decayrate: float=0.9, epsilon: float=1e-10) ->None:
from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta
self.optimizer = BAdadelta(decayrate, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adadelta':
return self.optimizer
class Adam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adam = Adam()
creating: createAdam
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08
) ->None:
from bigdl.dllib.optim.optimizer import Adam as BAdam
self.optimizer = BAdam(learningrate, learningrate_decay, beta1,
beta2, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adam':
return self.optimizer
class ParallelAdam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> pAdam = ParallelAdam()
creating: createParallelAdam
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08,
parallel_num: int=-1) ->None:
from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam
self.optimizer = BParallelAdam(learningrate, learningrate_decay,
beta1, beta2, epsilon, parallel_num, bigdl_type='float')
def get_optimizer(self) ->'optimizer.ParallelAdam':
return self.optimizer
class Ftrl(Optimizer):
"""
An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.
Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.
:param learningrate learning rate
:param learningrate_power double, must be less or equal to zero. Default is -0.5.
:param initial_accumulator_value double, the starting value for accumulators,
require zero or positive values.
:param l1_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_shrinkage_regularization_strength double, must be greater or equal to zero.
Default is zero. This differs from l2RegularizationStrength above. L2 above is a
stabilization penalty, whereas this one is a magnitude penalty.
>>> ftrl = Ftrl()
creating: createFtrl
>>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)
creating: createFtrl
"""
def __init__(self, learningrate: float=0.001, learningrate_power: float
=-0.5, initial_accumulator_value: float=0.1,
l1_regularization_strength: float=0.0, l2_regularization_strength:
float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:
from bigdl.dllib.optim.optimizer import Ftrl as BFtrl
self.optimizer = BFtrl(learningrate, learningrate_power,
initial_accumulator_value, l1_regularization_strength,
l2_regularization_strength,
l2_shrinkage_regularization_strength, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Ftrl':
return self.optimizer
class Adamax(Optimizer):
"""
An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adagrad = Adamax()
creating: createAdamax
"""
def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:
float=0.999, epsilon: float=1e-38) ->None:
from bigdl.dllib.optim.optimizer import Adamax as BAdamax
self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,
bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adamax':
return self.optimizer
class RMSprop(Optimizer):
"""
An implementation of RMSprop
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param decayrate decay rate, also called rho
:param epsilon for numerical stability
>>> adagrad = RMSprop()
creating: createRMSprop
"""
def __init__(self, learningrate: float=0.01, learningrate_decay: float=
0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:
from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop
self.optimizer = BRMSprop(learningrate, learningrate_decay,
decayrate, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.RMSprop':
return self.optimizer
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class SGD(Optimizer):
<|reserved_special_token_0|>
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, weightdecay: float=0.0, momentum: float=0.0, dampening: float
=DOUBLEMAX, nesterov: bool=False, learningrate_schedule: Optional[
'Scheduler']=None, learningrates: Optional['np.ndarray']=None,
weightdecays: Optional['np.ndarray']=None) ->None:
from bigdl.dllib.optim.optimizer import SGD as BSGD
        invalidInputError(isinstance(learningrate_schedule, Scheduler),
            f'learningrate_schedule should be a bigdl.orca.learn.optimizers.schedule.Scheduler, but got {learningrate_schedule}'
            )
self.optimizer = BSGD(learningrate, learningrate_decay, weightdecay,
momentum, dampening, nesterov, learningrate_schedule.
get_scheduler(), learningrates, weightdecays, bigdl_type='float')
def get_optimizer(self) ->'optimizer.SGD':
return self.optimizer
class Adagrad(Optimizer):
"""
An implementation of Adagrad. See the original paper:
http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param weightdecay weight decay
>>> adagrad = Adagrad()
creating: createAdagrad
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, weightdecay: float=0.0) ->None:
from bigdl.dllib.optim.optimizer import Adagrad as BAdagrad
self.optimizer = BAdagrad(learningrate, learningrate_decay,
weightdecay, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adagrad':
return self.optimizer
class LBFGS(Optimizer):
"""
This implementation of L-BFGS relies on a user-provided line
search function (state.lineSearch). If this function is not
provided, then a simple learningRate is used to produce fixed
size steps. Fixed size steps are much less costly than line
searches, and can be useful for stochastic problems.
The learning rate is used even when a line search is provided.
This is also useful for large-scale stochastic problems, where
opfunc is a noisy approximation of f(x). In that case, the learning
rate allows a reduction of confidence in the step size.
:param max_iter Maximum number of iterations allowed
:param max_eval Maximum number of function evaluations
:param tolfun Termination tolerance on the first-order optimality
:param tolx Termination tol on progress in terms of func/param changes
:param ncorrection
:param learningrate
:param verbose
:param linesearch A line search function
:param linesearch_options If no line search provided, then a fixed step size is used
>>> lbfgs = LBFGS()
creating: createLBFGS
"""
def __init__(self, max_iter: int=20, max_eval: float=DOUBLEMAX, tolfun:
float=1e-05, tolx: float=1e-09, ncorrection: int=100, learningrate:
float=1.0, verbose: bool=False, linesearch: Any=None,
linesearch_options: Optional[Dict[Any, Any]]=None) ->None:
from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS
self.optimizer = BLBFGS(max_iter, max_eval, tolfun, tolx,
ncorrection, learningrate, verbose, linesearch,
linesearch_options, bigdl_type='float')
def get_optimizer(self) ->'optimizer.LBFGS':
return self.optimizer
class Adadelta(Optimizer):
"""
Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701
:param decayrate interpolation parameter rho
:param epsilon for numerical stability
>>> adagrad = Adadelta()
creating: createAdadelta
"""
def __init__(self, decayrate: float=0.9, epsilon: float=1e-10) ->None:
from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta
self.optimizer = BAdadelta(decayrate, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adadelta':
return self.optimizer
class Adam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adam = Adam()
creating: createAdam
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08
) ->None:
from bigdl.dllib.optim.optimizer import Adam as BAdam
self.optimizer = BAdam(learningrate, learningrate_decay, beta1,
beta2, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adam':
return self.optimizer
class ParallelAdam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> pAdam = ParallelAdam()
creating: createParallelAdam
"""
def __init__(self, learningrate: float=0.001, learningrate_decay: float
=0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08,
parallel_num: int=-1) ->None:
from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam
self.optimizer = BParallelAdam(learningrate, learningrate_decay,
beta1, beta2, epsilon, parallel_num, bigdl_type='float')
def get_optimizer(self) ->'optimizer.ParallelAdam':
return self.optimizer
class Ftrl(Optimizer):
"""
An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.
Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.
:param learningrate learning rate
:param learningrate_power double, must be less or equal to zero. Default is -0.5.
:param initial_accumulator_value double, the starting value for accumulators,
require zero or positive values.
:param l1_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_shrinkage_regularization_strength double, must be greater or equal to zero.
Default is zero. This differs from l2RegularizationStrength above. L2 above is a
stabilization penalty, whereas this one is a magnitude penalty.
>>> ftrl = Ftrl()
creating: createFtrl
>>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)
creating: createFtrl
"""
def __init__(self, learningrate: float=0.001, learningrate_power: float
=-0.5, initial_accumulator_value: float=0.1,
l1_regularization_strength: float=0.0, l2_regularization_strength:
float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:
from bigdl.dllib.optim.optimizer import Ftrl as BFtrl
self.optimizer = BFtrl(learningrate, learningrate_power,
initial_accumulator_value, l1_regularization_strength,
l2_regularization_strength,
l2_shrinkage_regularization_strength, bigdl_type='float')
def get_optimizer(self) ->'optimizer.Ftrl':
return self.optimizer
class Adamax(Optimizer):
"""
An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adagrad = Adamax()
creating: createAdamax
"""
def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:
float=0.999, epsilon: float=1e-38) ->None:
from bigdl.dllib.optim.optimizer import Adamax as BAdamax
self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,
bigdl_type='float')
def get_optimizer(self) ->'optimizer.Adamax':
return self.optimizer
class RMSprop(Optimizer):
"""
An implementation of RMSprop
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param decayrate decay rate, also called rho
:param epsilon for numerical stability
>>> adagrad = RMSprop()
creating: createRMSprop
"""
def __init__(self, learningrate: float=0.01, learningrate_decay: float=
0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:
from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop
self.optimizer = BRMSprop(learningrate, learningrate_decay,
decayrate, epsilon, bigdl_type='float')
def get_optimizer(self) ->'optimizer.RMSprop':
return self.optimizer
<|reserved_special_token_1|>
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from abc import ABC, abstractmethod
from bigdl.dllib.utils.common import DOUBLEMAX
from bigdl.orca.learn.optimizers.schedule import Scheduler
from bigdl.dllib.utils.log4Error import invalidInputError
from typing import (Any, Optional, Dict, TYPE_CHECKING)
if TYPE_CHECKING:
from bigdl.dllib.optim import optimizer
import numpy as np
class Optimizer(ABC):
@abstractmethod
def get_optimizer(self):
pass
class SGD(Optimizer):
"""
A plain implementation of SGD
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param weightdecay weight decay
:param momentum momentum
:param dampening dampening for momentum
:param nesterov enables Nesterov momentum
:param learningrates 1D tensor of individual learning rates
:param weightdecays 1D tensor of individual weight decays
>>> sgd = SGD()
creating: createDefault
creating: createSGD
"""
def __init__(self,
learningrate: float = 1e-3,
learningrate_decay: float = 0.0,
weightdecay: float = 0.0,
momentum: float = 0.0,
dampening: float = DOUBLEMAX,
nesterov: bool = False,
learningrate_schedule: Optional["Scheduler"] = None,
learningrates: Optional["np.ndarray"] = None,
weightdecays: Optional["np.ndarray"] = None) -> None:
from bigdl.dllib.optim.optimizer import SGD as BSGD
        invalidInputError(isinstance(learningrate_schedule, Scheduler),
                          "learningrate_schedule should be a "
                          "bigdl.orca.learn.optimizers.schedule.Scheduler,"
                          f" but got {learningrate_schedule}")
self.optimizer = BSGD(learningrate,
learningrate_decay,
weightdecay,
momentum,
dampening,
nesterov,
learningrate_schedule.get_scheduler(), # type: ignore
learningrates,
weightdecays,
bigdl_type="float")
def get_optimizer(self) -> "optimizer.SGD":
return self.optimizer
class Adagrad(Optimizer):
"""
An implementation of Adagrad. See the original paper:
http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param weightdecay weight decay
>>> adagrad = Adagrad()
creating: createAdagrad
"""
def __init__(self,
learningrate: float = 1e-3,
learningrate_decay: float = 0.0,
weightdecay: float = 0.0) -> None:
from bigdl.dllib.optim.optimizer import Adagrad as BAdagrad
self.optimizer = BAdagrad(learningrate, learningrate_decay,
weightdecay, bigdl_type="float")
def get_optimizer(self) -> "optimizer.Adagrad":
return self.optimizer
class LBFGS(Optimizer):
"""
This implementation of L-BFGS relies on a user-provided line
search function (state.lineSearch). If this function is not
provided, then a simple learningRate is used to produce fixed
size steps. Fixed size steps are much less costly than line
searches, and can be useful for stochastic problems.
The learning rate is used even when a line search is provided.
This is also useful for large-scale stochastic problems, where
opfunc is a noisy approximation of f(x). In that case, the learning
rate allows a reduction of confidence in the step size.
:param max_iter Maximum number of iterations allowed
:param max_eval Maximum number of function evaluations
:param tolfun Termination tolerance on the first-order optimality
:param tolx Termination tol on progress in terms of func/param changes
:param ncorrection
:param learningrate
:param verbose
:param linesearch A line search function
:param linesearch_options If no line search provided, then a fixed step size is used
>>> lbfgs = LBFGS()
creating: createLBFGS
"""
def __init__(self,
max_iter: int = 20,
max_eval: float = DOUBLEMAX,
tolfun: float = 1e-5,
tolx: float = 1e-9,
ncorrection: int = 100,
learningrate: float = 1.0,
verbose: bool = False,
linesearch: Any = None,
linesearch_options: Optional[Dict[Any, Any]]=None) -> None:
from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS
self.optimizer = BLBFGS(
max_iter,
max_eval,
tolfun,
tolx,
ncorrection,
learningrate,
verbose,
linesearch,
linesearch_options,
bigdl_type="float"
)
def get_optimizer(self) -> "optimizer.LBFGS":
return self.optimizer
class Adadelta(Optimizer):
"""
Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701
:param decayrate interpolation parameter rho
:param epsilon for numerical stability
>>> adagrad = Adadelta()
creating: createAdadelta
"""
def __init__(self,
decayrate: float = 0.9,
epsilon: float = 1e-10) -> None:
from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta
self.optimizer = BAdadelta(decayrate,
epsilon,
bigdl_type="float")
def get_optimizer(self) -> "optimizer.Adadelta":
return self.optimizer
class Adam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adam = Adam()
creating: createAdam
"""
def __init__(self,
learningrate: float = 1e-3,
learningrate_decay: float = 0.0,
beta1: float = 0.9,
beta2: float = 0.999,
epsilon: float = 1e-8) -> None:
from bigdl.dllib.optim.optimizer import Adam as BAdam
self.optimizer = BAdam(learningrate,
learningrate_decay,
beta1,
beta2,
epsilon,
bigdl_type="float")
def get_optimizer(self) -> "optimizer.Adam":
return self.optimizer
class ParallelAdam(Optimizer):
"""
An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> pAdam = ParallelAdam()
creating: createParallelAdam
"""
def __init__(self,
learningrate: float = 1e-3,
learningrate_decay: float = 0.0,
beta1: float = 0.9,
beta2: float = 0.999,
epsilon: float = 1e-8,
parallel_num: int = -1) -> None:
from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam
self.optimizer = BParallelAdam(learningrate,
learningrate_decay,
beta1,
beta2,
epsilon,
parallel_num,
bigdl_type="float")
def get_optimizer(self) -> "optimizer.ParallelAdam":
return self.optimizer
class Ftrl(Optimizer):
"""
An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.
Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.
:param learningrate learning rate
:param learningrate_power double, must be less or equal to zero. Default is -0.5.
:param initial_accumulator_value double, the starting value for accumulators,
require zero or positive values.
:param l1_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_regularization_strength double, must be greater or equal to zero. Default is zero.
:param l2_shrinkage_regularization_strength double, must be greater or equal to zero.
Default is zero. This differs from l2RegularizationStrength above. L2 above is a
stabilization penalty, whereas this one is a magnitude penalty.
>>> ftrl = Ftrl()
creating: createFtrl
>>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)
creating: createFtrl
"""
def __init__(self,
learningrate: float = 1e-3,
learningrate_power: float = -0.5,
initial_accumulator_value: float = 0.1,
l1_regularization_strength: float = 0.0,
l2_regularization_strength: float = 0.0,
l2_shrinkage_regularization_strength: float = 0.0) -> None:
from bigdl.dllib.optim.optimizer import Ftrl as BFtrl
self.optimizer = BFtrl(learningrate,
learningrate_power,
initial_accumulator_value,
l1_regularization_strength,
l2_regularization_strength,
l2_shrinkage_regularization_strength,
bigdl_type="float")
def get_optimizer(self) -> "optimizer.Ftrl":
return self.optimizer
class Adamax(Optimizer):
"""
An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf
:param learningrate learning rate
:param beta1 first moment coefficient
:param beta2 second moment coefficient
:param epsilon for numerical stability
>>> adagrad = Adamax()
creating: createAdamax
"""
def __init__(self,
learningrate: float = 0.002,
beta1: float = 0.9,
beta2: float = 0.999,
epsilon: float = 1e-38) -> None:
from bigdl.dllib.optim.optimizer import Adamax as BAdamax
self.optimizer = BAdamax(learningrate,
beta1,
beta2,
epsilon,
bigdl_type="float")
def get_optimizer(self) -> "optimizer.Adamax":
return self.optimizer
class RMSprop(Optimizer):
"""
An implementation of RMSprop
:param learningrate learning rate
:param learningrate_decay learning rate decay
:param decayrate decay rate, also called rho
:param epsilon for numerical stability
>>> adagrad = RMSprop()
creating: createRMSprop
"""
def __init__(self,
learningrate: float = 1e-2,
learningrate_decay: float = 0.0,
decayrate: float = 0.99,
epsilon: float = 1e-8) -> None:
from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop
self.optimizer = BRMSprop(learningrate,
learningrate_decay,
decayrate,
epsilon,
bigdl_type="float")
def get_optimizer(self) -> "optimizer.RMSprop":
return self.optimizer
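
# --- Editorial sketch (not part of the original module): a hedged example of
# constructing these wrappers and unwrapping the underlying BigDL optimizers.
# The hyper-parameter values are illustrative assumptions; only classes defined
# above are used.
if __name__ == "__main__":
    adam = Adam(learningrate=1e-3, beta1=0.9, beta2=0.999)
    ftrl = Ftrl(learningrate=1e-2, l1_regularization_strength=0.1)
    # get_optimizer() returns the bigdl.dllib optimizer instance that the
    # training code ultimately consumes.
    bigdl_adam = adam.get_optimizer()
    bigdl_ftrl = ftrl.get_optimizer()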
|
flexible
|
{
"blob_id": "ce69f7b7cf8c38845bfe589c83fdd6e43ab50912",
"index": 3708,
"step-1": "<mask token>\n\n\nclass Adam(Optimizer):\n <mask token>\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08\n ) ->None:\n from bigdl.dllib.optim.optimizer import Adam as BAdam\n self.optimizer = BAdam(learningrate, learningrate_decay, beta1,\n beta2, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adam':\n return self.optimizer\n\n\nclass ParallelAdam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> pAdam = ParallelAdam()\n creating: createParallelAdam\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08,\n parallel_num: int=-1) ->None:\n from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam\n self.optimizer = BParallelAdam(learningrate, learningrate_decay,\n beta1, beta2, epsilon, parallel_num, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.ParallelAdam':\n return self.optimizer\n\n\nclass Ftrl(Optimizer):\n \"\"\"\n An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.\n Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.\n\n :param learningrate learning rate\n :param learningrate_power double, must be less or equal to zero. Default is -0.5.\n :param initial_accumulator_value double, the starting value for accumulators,\n require zero or positive values.\n :param l1_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_shrinkage_regularization_strength double, must be greater or equal to zero.\n Default is zero. This differs from l2RegularizationStrength above. 
L2 above is a\n stabilization penalty, whereas this one is a magnitude penalty.\n >>> ftrl = Ftrl()\n creating: createFtrl\n >>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)\n creating: createFtrl\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_power: float\n =-0.5, initial_accumulator_value: float=0.1,\n l1_regularization_strength: float=0.0, l2_regularization_strength:\n float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:\n from bigdl.dllib.optim.optimizer import Ftrl as BFtrl\n self.optimizer = BFtrl(learningrate, learningrate_power,\n initial_accumulator_value, l1_regularization_strength,\n l2_regularization_strength,\n l2_shrinkage_regularization_strength, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Ftrl':\n return self.optimizer\n\n\nclass Adamax(Optimizer):\n \"\"\"\n An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adagrad = Adamax()\n creating: createAdamax\n \"\"\"\n\n def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:\n float=0.999, epsilon: float=1e-38) ->None:\n from bigdl.dllib.optim.optimizer import Adamax as BAdamax\n self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,\n bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adamax':\n return self.optimizer\n\n\nclass RMSprop(Optimizer):\n \"\"\"\n An implementation of RMSprop\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param decayrate decay rate, also called rho\n :param epsilon for numerical stability\n >>> adagrad = RMSprop()\n creating: createRMSprop\n \"\"\"\n\n def __init__(self, learningrate: float=0.01, learningrate_decay: float=\n 0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:\n from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop\n self.optimizer = BRMSprop(learningrate, learningrate_decay,\n decayrate, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.RMSprop':\n return self.optimizer\n",
"step-2": "<mask token>\n\n\nclass Adagrad(Optimizer):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass LBFGS(Optimizer):\n \"\"\"\n This implementation of L-BFGS relies on a user-provided line\n search function (state.lineSearch). If this function is not\n provided, then a simple learningRate is used to produce fixed\n size steps. Fixed size steps are much less costly than line\n searches, and can be useful for stochastic problems.\n The learning rate is used even when a line search is provided.\n This is also useful for large-scale stochastic problems, where\n opfunc is a noisy approximation of f(x). In that case, the learning\n rate allows a reduction of confidence in the step size.\n\n :param max_iter Maximum number of iterations allowed\n :param max_eval Maximum number of function evaluations\n :param tolfun Termination tolerance on the first-order optimality\n :param tolx Termination tol on progress in terms of func/param changes\n :param ncorrection\n :param learningrate\n :param verbose\n :param linesearch A line search function\n :param linesearch_options If no line search provided, then a fixed step size is used\n >>> lbfgs = LBFGS()\n creating: createLBFGS\n \"\"\"\n\n def __init__(self, max_iter: int=20, max_eval: float=DOUBLEMAX, tolfun:\n float=1e-05, tolx: float=1e-09, ncorrection: int=100, learningrate:\n float=1.0, verbose: bool=False, linesearch: Any=None,\n linesearch_options: Optional[Dict[Any, Any]]=None) ->None:\n from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS\n self.optimizer = BLBFGS(max_iter, max_eval, tolfun, tolx,\n ncorrection, learningrate, verbose, linesearch,\n linesearch_options, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.LBFGS':\n return self.optimizer\n\n\nclass Adadelta(Optimizer):\n \"\"\"\n Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701\n\n :param decayrate interpolation parameter rho\n :param epsilon for numerical stability\n >>> adagrad = Adadelta()\n creating: createAdadelta\n \"\"\"\n\n def __init__(self, decayrate: float=0.9, epsilon: float=1e-10) ->None:\n from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta\n self.optimizer = BAdadelta(decayrate, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adadelta':\n return self.optimizer\n\n\nclass Adam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adam = Adam()\n creating: createAdam\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08\n ) ->None:\n from bigdl.dllib.optim.optimizer import Adam as BAdam\n self.optimizer = BAdam(learningrate, learningrate_decay, beta1,\n beta2, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adam':\n return self.optimizer\n\n\nclass ParallelAdam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> pAdam = ParallelAdam()\n creating: createParallelAdam\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: 
float=1e-08,\n parallel_num: int=-1) ->None:\n from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam\n self.optimizer = BParallelAdam(learningrate, learningrate_decay,\n beta1, beta2, epsilon, parallel_num, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.ParallelAdam':\n return self.optimizer\n\n\nclass Ftrl(Optimizer):\n \"\"\"\n An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.\n Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.\n\n :param learningrate learning rate\n :param learningrate_power double, must be less or equal to zero. Default is -0.5.\n :param initial_accumulator_value double, the starting value for accumulators,\n require zero or positive values.\n :param l1_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_shrinkage_regularization_strength double, must be greater or equal to zero.\n Default is zero. This differs from l2RegularizationStrength above. L2 above is a\n stabilization penalty, whereas this one is a magnitude penalty.\n >>> ftrl = Ftrl()\n creating: createFtrl\n >>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)\n creating: createFtrl\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_power: float\n =-0.5, initial_accumulator_value: float=0.1,\n l1_regularization_strength: float=0.0, l2_regularization_strength:\n float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:\n from bigdl.dllib.optim.optimizer import Ftrl as BFtrl\n self.optimizer = BFtrl(learningrate, learningrate_power,\n initial_accumulator_value, l1_regularization_strength,\n l2_regularization_strength,\n l2_shrinkage_regularization_strength, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Ftrl':\n return self.optimizer\n\n\nclass Adamax(Optimizer):\n \"\"\"\n An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adagrad = Adamax()\n creating: createAdamax\n \"\"\"\n\n def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:\n float=0.999, epsilon: float=1e-38) ->None:\n from bigdl.dllib.optim.optimizer import Adamax as BAdamax\n self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,\n bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adamax':\n return self.optimizer\n\n\nclass RMSprop(Optimizer):\n \"\"\"\n An implementation of RMSprop\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param decayrate decay rate, also called rho\n :param epsilon for numerical stability\n >>> adagrad = RMSprop()\n creating: createRMSprop\n \"\"\"\n\n def __init__(self, learningrate: float=0.01, learningrate_decay: float=\n 0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:\n from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop\n self.optimizer = BRMSprop(learningrate, learningrate_decay,\n decayrate, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.RMSprop':\n return self.optimizer\n",
"step-3": "<mask token>\n\n\nclass SGD(Optimizer):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Adagrad(Optimizer):\n \"\"\"\n An implementation of Adagrad. See the original paper:\n http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf\n\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param weightdecay weight decay\n >>> adagrad = Adagrad()\n creating: createAdagrad\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, weightdecay: float=0.0) ->None:\n from bigdl.dllib.optim.optimizer import Adagrad as BAdagrad\n self.optimizer = BAdagrad(learningrate, learningrate_decay,\n weightdecay, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adagrad':\n return self.optimizer\n\n\nclass LBFGS(Optimizer):\n \"\"\"\n This implementation of L-BFGS relies on a user-provided line\n search function (state.lineSearch). If this function is not\n provided, then a simple learningRate is used to produce fixed\n size steps. Fixed size steps are much less costly than line\n searches, and can be useful for stochastic problems.\n The learning rate is used even when a line search is provided.\n This is also useful for large-scale stochastic problems, where\n opfunc is a noisy approximation of f(x). In that case, the learning\n rate allows a reduction of confidence in the step size.\n\n :param max_iter Maximum number of iterations allowed\n :param max_eval Maximum number of function evaluations\n :param tolfun Termination tolerance on the first-order optimality\n :param tolx Termination tol on progress in terms of func/param changes\n :param ncorrection\n :param learningrate\n :param verbose\n :param linesearch A line search function\n :param linesearch_options If no line search provided, then a fixed step size is used\n >>> lbfgs = LBFGS()\n creating: createLBFGS\n \"\"\"\n\n def __init__(self, max_iter: int=20, max_eval: float=DOUBLEMAX, tolfun:\n float=1e-05, tolx: float=1e-09, ncorrection: int=100, learningrate:\n float=1.0, verbose: bool=False, linesearch: Any=None,\n linesearch_options: Optional[Dict[Any, Any]]=None) ->None:\n from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS\n self.optimizer = BLBFGS(max_iter, max_eval, tolfun, tolx,\n ncorrection, learningrate, verbose, linesearch,\n linesearch_options, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.LBFGS':\n return self.optimizer\n\n\nclass Adadelta(Optimizer):\n \"\"\"\n Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701\n\n :param decayrate interpolation parameter rho\n :param epsilon for numerical stability\n >>> adagrad = Adadelta()\n creating: createAdadelta\n \"\"\"\n\n def __init__(self, decayrate: float=0.9, epsilon: float=1e-10) ->None:\n from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta\n self.optimizer = BAdadelta(decayrate, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adadelta':\n return self.optimizer\n\n\nclass Adam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adam = Adam()\n creating: createAdam\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08\n ) ->None:\n from bigdl.dllib.optim.optimizer import Adam as BAdam\n 
self.optimizer = BAdam(learningrate, learningrate_decay, beta1,\n beta2, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adam':\n return self.optimizer\n\n\nclass ParallelAdam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> pAdam = ParallelAdam()\n creating: createParallelAdam\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08,\n parallel_num: int=-1) ->None:\n from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam\n self.optimizer = BParallelAdam(learningrate, learningrate_decay,\n beta1, beta2, epsilon, parallel_num, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.ParallelAdam':\n return self.optimizer\n\n\nclass Ftrl(Optimizer):\n \"\"\"\n An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.\n Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.\n\n :param learningrate learning rate\n :param learningrate_power double, must be less or equal to zero. Default is -0.5.\n :param initial_accumulator_value double, the starting value for accumulators,\n require zero or positive values.\n :param l1_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_shrinkage_regularization_strength double, must be greater or equal to zero.\n Default is zero. This differs from l2RegularizationStrength above. 
L2 above is a\n stabilization penalty, whereas this one is a magnitude penalty.\n >>> ftrl = Ftrl()\n creating: createFtrl\n >>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)\n creating: createFtrl\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_power: float\n =-0.5, initial_accumulator_value: float=0.1,\n l1_regularization_strength: float=0.0, l2_regularization_strength:\n float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:\n from bigdl.dllib.optim.optimizer import Ftrl as BFtrl\n self.optimizer = BFtrl(learningrate, learningrate_power,\n initial_accumulator_value, l1_regularization_strength,\n l2_regularization_strength,\n l2_shrinkage_regularization_strength, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Ftrl':\n return self.optimizer\n\n\nclass Adamax(Optimizer):\n \"\"\"\n An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adagrad = Adamax()\n creating: createAdamax\n \"\"\"\n\n def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:\n float=0.999, epsilon: float=1e-38) ->None:\n from bigdl.dllib.optim.optimizer import Adamax as BAdamax\n self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,\n bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adamax':\n return self.optimizer\n\n\nclass RMSprop(Optimizer):\n \"\"\"\n An implementation of RMSprop\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param decayrate decay rate, also called rho\n :param epsilon for numerical stability\n >>> adagrad = RMSprop()\n creating: createRMSprop\n \"\"\"\n\n def __init__(self, learningrate: float=0.01, learningrate_decay: float=\n 0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:\n from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop\n self.optimizer = BRMSprop(learningrate, learningrate_decay,\n decayrate, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.RMSprop':\n return self.optimizer\n",
"step-4": "<mask token>\n\n\nclass SGD(Optimizer):\n <mask token>\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, weightdecay: float=0.0, momentum: float=0.0, dampening: float\n =DOUBLEMAX, nesterov: bool=False, learningrate_schedule: Optional[\n 'Scheduler']=None, learningrates: Optional['np.ndarray']=None,\n weightdecays: Optional['np.ndarray']=None) ->None:\n from bigdl.dllib.optim.optimizer import SGD as BSGD\n invalidInputError(isinstance(learningrate_schedule, Scheduler),\n 'learningrate_schedule should be an bigdl.orca.learn.optimizers.schedule.Scheduler, but got {learningrate_schedule}'\n )\n self.optimizer = BSGD(learningrate, learningrate_decay, weightdecay,\n momentum, dampening, nesterov, learningrate_schedule.\n get_scheduler(), learningrates, weightdecays, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.SGD':\n return self.optimizer\n\n\nclass Adagrad(Optimizer):\n \"\"\"\n An implementation of Adagrad. See the original paper:\n http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf\n\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param weightdecay weight decay\n >>> adagrad = Adagrad()\n creating: createAdagrad\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, weightdecay: float=0.0) ->None:\n from bigdl.dllib.optim.optimizer import Adagrad as BAdagrad\n self.optimizer = BAdagrad(learningrate, learningrate_decay,\n weightdecay, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adagrad':\n return self.optimizer\n\n\nclass LBFGS(Optimizer):\n \"\"\"\n This implementation of L-BFGS relies on a user-provided line\n search function (state.lineSearch). If this function is not\n provided, then a simple learningRate is used to produce fixed\n size steps. Fixed size steps are much less costly than line\n searches, and can be useful for stochastic problems.\n The learning rate is used even when a line search is provided.\n This is also useful for large-scale stochastic problems, where\n opfunc is a noisy approximation of f(x). 
In that case, the learning\n rate allows a reduction of confidence in the step size.\n\n :param max_iter Maximum number of iterations allowed\n :param max_eval Maximum number of function evaluations\n :param tolfun Termination tolerance on the first-order optimality\n :param tolx Termination tol on progress in terms of func/param changes\n :param ncorrection\n :param learningrate\n :param verbose\n :param linesearch A line search function\n :param linesearch_options If no line search provided, then a fixed step size is used\n >>> lbfgs = LBFGS()\n creating: createLBFGS\n \"\"\"\n\n def __init__(self, max_iter: int=20, max_eval: float=DOUBLEMAX, tolfun:\n float=1e-05, tolx: float=1e-09, ncorrection: int=100, learningrate:\n float=1.0, verbose: bool=False, linesearch: Any=None,\n linesearch_options: Optional[Dict[Any, Any]]=None) ->None:\n from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS\n self.optimizer = BLBFGS(max_iter, max_eval, tolfun, tolx,\n ncorrection, learningrate, verbose, linesearch,\n linesearch_options, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.LBFGS':\n return self.optimizer\n\n\nclass Adadelta(Optimizer):\n \"\"\"\n Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701\n\n :param decayrate interpolation parameter rho\n :param epsilon for numerical stability\n >>> adagrad = Adadelta()\n creating: createAdadelta\n \"\"\"\n\n def __init__(self, decayrate: float=0.9, epsilon: float=1e-10) ->None:\n from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta\n self.optimizer = BAdadelta(decayrate, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adadelta':\n return self.optimizer\n\n\nclass Adam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adam = Adam()\n creating: createAdam\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08\n ) ->None:\n from bigdl.dllib.optim.optimizer import Adam as BAdam\n self.optimizer = BAdam(learningrate, learningrate_decay, beta1,\n beta2, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adam':\n return self.optimizer\n\n\nclass ParallelAdam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> pAdam = ParallelAdam()\n creating: createParallelAdam\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_decay: float\n =0.0, beta1: float=0.9, beta2: float=0.999, epsilon: float=1e-08,\n parallel_num: int=-1) ->None:\n from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam\n self.optimizer = BParallelAdam(learningrate, learningrate_decay,\n beta1, beta2, epsilon, parallel_num, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.ParallelAdam':\n return self.optimizer\n\n\nclass Ftrl(Optimizer):\n \"\"\"\n An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.\n Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.\n\n :param learningrate learning rate\n :param learningrate_power double, must be less or equal to 
zero. Default is -0.5.\n :param initial_accumulator_value double, the starting value for accumulators,\n require zero or positive values.\n :param l1_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_shrinkage_regularization_strength double, must be greater or equal to zero.\n Default is zero. This differs from l2RegularizationStrength above. L2 above is a\n stabilization penalty, whereas this one is a magnitude penalty.\n >>> ftrl = Ftrl()\n creating: createFtrl\n >>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)\n creating: createFtrl\n \"\"\"\n\n def __init__(self, learningrate: float=0.001, learningrate_power: float\n =-0.5, initial_accumulator_value: float=0.1,\n l1_regularization_strength: float=0.0, l2_regularization_strength:\n float=0.0, l2_shrinkage_regularization_strength: float=0.0) ->None:\n from bigdl.dllib.optim.optimizer import Ftrl as BFtrl\n self.optimizer = BFtrl(learningrate, learningrate_power,\n initial_accumulator_value, l1_regularization_strength,\n l2_regularization_strength,\n l2_shrinkage_regularization_strength, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Ftrl':\n return self.optimizer\n\n\nclass Adamax(Optimizer):\n \"\"\"\n An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adagrad = Adamax()\n creating: createAdamax\n \"\"\"\n\n def __init__(self, learningrate: float=0.002, beta1: float=0.9, beta2:\n float=0.999, epsilon: float=1e-38) ->None:\n from bigdl.dllib.optim.optimizer import Adamax as BAdamax\n self.optimizer = BAdamax(learningrate, beta1, beta2, epsilon,\n bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.Adamax':\n return self.optimizer\n\n\nclass RMSprop(Optimizer):\n \"\"\"\n An implementation of RMSprop\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param decayrate decay rate, also called rho\n :param epsilon for numerical stability\n >>> adagrad = RMSprop()\n creating: createRMSprop\n \"\"\"\n\n def __init__(self, learningrate: float=0.01, learningrate_decay: float=\n 0.0, decayrate: float=0.99, epsilon: float=1e-08) ->None:\n from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop\n self.optimizer = BRMSprop(learningrate, learningrate_decay,\n decayrate, epsilon, bigdl_type='float')\n\n def get_optimizer(self) ->'optimizer.RMSprop':\n return self.optimizer\n",
"step-5": "#\n# Copyright 2016 The BigDL Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom abc import ABC, abstractmethod\n\nfrom bigdl.dllib.utils.common import DOUBLEMAX\nfrom bigdl.orca.learn.optimizers.schedule import Scheduler\nfrom bigdl.dllib.utils.log4Error import invalidInputError\n\nfrom typing import (Any, Optional, Dict, TYPE_CHECKING)\n\nif TYPE_CHECKING:\n from bigdl.dllib.optim import optimizer\n import numpy as np\n\n\nclass Optimizer(ABC):\n\n @abstractmethod\n def get_optimizer(self):\n pass\n\n\nclass SGD(Optimizer):\n \"\"\"\n A plain implementation of SGD\n\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param weightdecay weight decay\n :param momentum momentum\n :param dampening dampening for momentum\n :param nesterov enables Nesterov momentum\n :param learningrates 1D tensor of individual learning rates\n :param weightdecays 1D tensor of individual weight decays\n >>> sgd = SGD()\n creating: createDefault\n creating: createSGD\n \"\"\"\n\n def __init__(self,\n learningrate: float = 1e-3,\n learningrate_decay: float = 0.0,\n weightdecay: float = 0.0,\n momentum: float = 0.0,\n dampening: float = DOUBLEMAX,\n nesterov: bool = False,\n learningrate_schedule: Optional[\"Scheduler\"] = None,\n learningrates: Optional[\"np.ndarray\"] = None,\n weightdecays: Optional[\"np.ndarray\"] = None) -> None:\n from bigdl.dllib.optim.optimizer import SGD as BSGD\n invalidInputError(isinstance(learningrate_schedule, Scheduler),\n \"learningrate_schedule should be an \"\n \"bigdl.orca.learn.optimizers.schedule.Scheduler,\"\n \" but got {learningrate_schedule}\")\n self.optimizer = BSGD(learningrate,\n learningrate_decay,\n weightdecay,\n momentum,\n dampening,\n nesterov,\n learningrate_schedule.get_scheduler(), # type: ignore\n learningrates,\n weightdecays,\n bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.SGD\":\n return self.optimizer\n\n\nclass Adagrad(Optimizer):\n \"\"\"\n An implementation of Adagrad. See the original paper:\n http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf\n\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param weightdecay weight decay\n >>> adagrad = Adagrad()\n creating: createAdagrad\n \"\"\"\n\n def __init__(self,\n learningrate: float = 1e-3,\n learningrate_decay: float = 0.0,\n weightdecay: float = 0.0) -> None:\n from bigdl.dllib.optim.optimizer import Adagrad as BAdagrad\n self.optimizer = BAdagrad(learningrate, learningrate_decay,\n weightdecay, bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.Adagrad\":\n return self.optimizer\n\n\nclass LBFGS(Optimizer):\n \"\"\"\n This implementation of L-BFGS relies on a user-provided line\n search function (state.lineSearch). If this function is not\n provided, then a simple learningRate is used to produce fixed\n size steps. 
Fixed size steps are much less costly than line\n searches, and can be useful for stochastic problems.\n The learning rate is used even when a line search is provided.\n This is also useful for large-scale stochastic problems, where\n opfunc is a noisy approximation of f(x). In that case, the learning\n rate allows a reduction of confidence in the step size.\n\n :param max_iter Maximum number of iterations allowed\n :param max_eval Maximum number of function evaluations\n :param tolfun Termination tolerance on the first-order optimality\n :param tolx Termination tol on progress in terms of func/param changes\n :param ncorrection\n :param learningrate\n :param verbose\n :param linesearch A line search function\n :param linesearch_options If no line search provided, then a fixed step size is used\n >>> lbfgs = LBFGS()\n creating: createLBFGS\n \"\"\"\n\n def __init__(self,\n max_iter: int = 20,\n max_eval: float = DOUBLEMAX,\n tolfun: float = 1e-5,\n tolx: float = 1e-9,\n ncorrection: int = 100,\n learningrate: float = 1.0,\n verbose: bool = False,\n linesearch: Any = None,\n linesearch_options: Optional[Dict[Any, Any]]=None) -> None:\n from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS\n self.optimizer = BLBFGS(\n max_iter,\n max_eval,\n tolfun,\n tolx,\n ncorrection,\n learningrate,\n verbose,\n linesearch,\n linesearch_options,\n bigdl_type=\"float\"\n )\n\n def get_optimizer(self) -> \"optimizer.LBFGS\":\n return self.optimizer\n\n\nclass Adadelta(Optimizer):\n \"\"\"\n Adadelta implementation for SGD: http://arxiv.org/abs/1212.5701\n\n :param decayrate interpolation parameter rho\n :param epsilon for numerical stability\n >>> adagrad = Adadelta()\n creating: createAdadelta\n \"\"\"\n\n def __init__(self,\n decayrate: float = 0.9,\n epsilon: float = 1e-10) -> None:\n from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta\n self.optimizer = BAdadelta(decayrate,\n epsilon,\n bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.Adadelta\":\n return self.optimizer\n\n\nclass Adam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adam = Adam()\n creating: createAdam\n \"\"\"\n\n def __init__(self,\n learningrate: float = 1e-3,\n learningrate_decay: float = 0.0,\n beta1: float = 0.9,\n beta2: float = 0.999,\n epsilon: float = 1e-8) -> None:\n from bigdl.dllib.optim.optimizer import Adam as BAdam\n self.optimizer = BAdam(learningrate,\n learningrate_decay,\n beta1,\n beta2,\n epsilon,\n bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.Adam\":\n return self.optimizer\n\n\nclass ParallelAdam(Optimizer):\n \"\"\"\n An implementation of Adam http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> pAdam = ParallelAdam()\n creating: createParallelAdam\n \"\"\"\n\n def __init__(self,\n learningrate: float = 1e-3,\n learningrate_decay: float = 0.0,\n beta1: float = 0.9,\n beta2: float = 0.999,\n epsilon: float = 1e-8,\n parallel_num: int = -1) -> None:\n from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam\n self.optimizer = BParallelAdam(learningrate,\n learningrate_decay,\n beta1,\n beta2,\n epsilon,\n 
parallel_num,\n bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.ParallelAdam\":\n return self.optimizer\n\n\nclass Ftrl(Optimizer):\n \"\"\"\n An implementation of Ftrl https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.\n Support L1 penalty, L2 penalty and shrinkage-type L2 penalty.\n\n :param learningrate learning rate\n :param learningrate_power double, must be less or equal to zero. Default is -0.5.\n :param initial_accumulator_value double, the starting value for accumulators,\n require zero or positive values.\n :param l1_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_regularization_strength double, must be greater or equal to zero. Default is zero.\n :param l2_shrinkage_regularization_strength double, must be greater or equal to zero.\n Default is zero. This differs from l2RegularizationStrength above. L2 above is a\n stabilization penalty, whereas this one is a magnitude penalty.\n >>> ftrl = Ftrl()\n creating: createFtrl\n >>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)\n creating: createFtrl\n \"\"\"\n\n def __init__(self,\n learningrate: float = 1e-3,\n learningrate_power: float = -0.5,\n initial_accumulator_value: float = 0.1,\n l1_regularization_strength: float = 0.0,\n l2_regularization_strength: float = 0.0,\n l2_shrinkage_regularization_strength: float = 0.0) -> None:\n from bigdl.dllib.optim.optimizer import Ftrl as BFtrl\n self.optimizer = BFtrl(learningrate,\n learningrate_power,\n initial_accumulator_value,\n l1_regularization_strength,\n l2_regularization_strength,\n l2_shrinkage_regularization_strength,\n bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.Ftrl\":\n return self.optimizer\n\n\nclass Adamax(Optimizer):\n \"\"\"\n An implementation of Adamax http://arxiv.org/pdf/1412.6980.pdf\n :param learningrate learning rate\n :param beta1 first moment coefficient\n :param beta2 second moment coefficient\n :param epsilon for numerical stability\n >>> adagrad = Adamax()\n creating: createAdamax\n \"\"\"\n\n def __init__(self,\n learningrate: float = 0.002,\n beta1: float = 0.9,\n beta2: float = 0.999,\n epsilon: float = 1e-38) -> None:\n from bigdl.dllib.optim.optimizer import Adamax as BAdamax\n self.optimizer = BAdamax(learningrate,\n beta1,\n beta2,\n epsilon,\n bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.Adamax\":\n return self.optimizer\n\n\nclass RMSprop(Optimizer):\n \"\"\"\n An implementation of RMSprop\n :param learningrate learning rate\n :param learningrate_decay learning rate decay\n :param decayrate decay rate, also called rho\n :param epsilon for numerical stability\n >>> adagrad = RMSprop()\n creating: createRMSprop\n \"\"\"\n\n def __init__(self,\n learningrate: float = 1e-2,\n learningrate_decay: float = 0.0,\n decayrate: float = 0.99,\n epsilon: float = 1e-8) -> None:\n from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop\n self.optimizer = BRMSprop(learningrate,\n learningrate_decay,\n decayrate,\n epsilon,\n bigdl_type=\"float\")\n\n def get_optimizer(self) -> \"optimizer.RMSprop\":\n return self.optimizer\n",
"step-ids": [
19,
29,
33,
35,
41
]
}
|
[
19,
29,
33,
35,
41
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
api_id = 0
api_hash = '00000000000000000000000'
phone = '+000000000000'
username = 'theone'
project_id = 0
<|reserved_special_token_1|>
# (1) Obtain your values here (https://core.telegram.org/api/obtaining_api_id)
api_id = 000000
api_hash = '00000000000000000000000'
phone = '+000000000000'
username = 'theone'
project_id = 000000000
|
flexible
|
{
"blob_id": "a5646a5d42dbf6e70e9d18f28513ee2df68a28b1",
"index": 6886,
"step-1": "<mask token>\n",
"step-2": "api_id = 0\napi_hash = '00000000000000000000000'\nphone = '+000000000000'\nusername = 'theone'\nproject_id = 0\n",
"step-3": "# (1) Obtain your values here (https://core.telegram.org/api/obtaining_api_id)\napi_id = 000000\napi_hash = '00000000000000000000000'\n\nphone = '+000000000000'\nusername = 'theone'\n\nproject_id = 000000000\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""
    Test cases
"""
import unittest
import jsonpath
import requests
from apiunittest.lib.loadIni import LoadIni
from apiunittest.keyword.keyword import Keyword
from apiunittest.lib.log import logger
from ddt import ddt, file_data
@ddt
class ApiTest(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
cls.keyword = Keyword()
cls.cookie = None
cls.confData = LoadIni('config.ini')
        logger.info('---------- Test run started ----------')
    # Log in
@file_data('../data/data.yaml')
def test_1_login(self, username, password):
s = requests.Session()
loginUrl = self.confData.getConfig('urlConfig', 'login')
data = {
'uname': username,
'upass': password,
'encode': 1
}
res = s.post(url=loginUrl, data=data)
logger.info(res.text)
cookie = dict(res.cookies)
sess = jsonpath.jsonpath(cookie, '$..{0}'.format('PHPSESSION'))
phpSession = 'PHP_SESSION=' + sess[0]
ApiTest.cookie = phpSession
        logger.info('Test case passed')
if __name__ == '__main__':
unittest.main()
|
normal
|
{
"blob_id": "b28bada020ac593783ac62994bb45311ebb78813",
"index": 9055,
"step-1": "<mask token>\n\n\n@ddt\nclass ApiTest(unittest.TestCase):\n\n @classmethod\n def setUpClass(cls) ->None:\n cls.keyword = Keyword()\n cls.cookie = None\n cls.confData = LoadIni('config.ini')\n logger.info('----------用例开始执行----------')\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@ddt\nclass ApiTest(unittest.TestCase):\n\n @classmethod\n def setUpClass(cls) ->None:\n cls.keyword = Keyword()\n cls.cookie = None\n cls.confData = LoadIni('config.ini')\n logger.info('----------用例开始执行----------')\n\n @file_data('../data/data.yaml')\n def test_1_login(self, username, password):\n s = requests.Session()\n loginUrl = self.confData.getConfig('urlConfig', 'login')\n data = {'uname': username, 'upass': password, 'encode': 1}\n res = s.post(url=loginUrl, data=data)\n logger.info(res.text)\n cookie = dict(res.cookies)\n sess = jsonpath.jsonpath(cookie, '$..{0}'.format('PHPSESSION'))\n phpSession = 'PHP_SESSION=' + sess[0]\n ApiTest.cookie = phpSession\n logger.info('用例执行成功')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\n@ddt\nclass ApiTest(unittest.TestCase):\n\n @classmethod\n def setUpClass(cls) ->None:\n cls.keyword = Keyword()\n cls.cookie = None\n cls.confData = LoadIni('config.ini')\n logger.info('----------用例开始执行----------')\n\n @file_data('../data/data.yaml')\n def test_1_login(self, username, password):\n s = requests.Session()\n loginUrl = self.confData.getConfig('urlConfig', 'login')\n data = {'uname': username, 'upass': password, 'encode': 1}\n res = s.post(url=loginUrl, data=data)\n logger.info(res.text)\n cookie = dict(res.cookies)\n sess = jsonpath.jsonpath(cookie, '$..{0}'.format('PHPSESSION'))\n phpSession = 'PHP_SESSION=' + sess[0]\n ApiTest.cookie = phpSession\n logger.info('用例执行成功')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-4": "<mask token>\nimport unittest\nimport jsonpath\nimport requests\nfrom apiunittest.lib.loadIni import LoadIni\nfrom apiunittest.keyword.keyword import Keyword\nfrom apiunittest.lib.log import logger\nfrom ddt import ddt, file_data\n\n\n@ddt\nclass ApiTest(unittest.TestCase):\n\n @classmethod\n def setUpClass(cls) ->None:\n cls.keyword = Keyword()\n cls.cookie = None\n cls.confData = LoadIni('config.ini')\n logger.info('----------用例开始执行----------')\n\n @file_data('../data/data.yaml')\n def test_1_login(self, username, password):\n s = requests.Session()\n loginUrl = self.confData.getConfig('urlConfig', 'login')\n data = {'uname': username, 'upass': password, 'encode': 1}\n res = s.post(url=loginUrl, data=data)\n logger.info(res.text)\n cookie = dict(res.cookies)\n sess = jsonpath.jsonpath(cookie, '$..{0}'.format('PHPSESSION'))\n phpSession = 'PHP_SESSION=' + sess[0]\n ApiTest.cookie = phpSession\n logger.info('用例执行成功')\n\n\nif __name__ == '__main__':\n unittest.main()\n",
"step-5": "\"\"\"\r\n 测试用例\r\n\"\"\"\r\nimport unittest\r\nimport jsonpath\r\nimport requests\r\nfrom apiunittest.lib.loadIni import LoadIni\r\nfrom apiunittest.keyword.keyword import Keyword\r\nfrom apiunittest.lib.log import logger\r\nfrom ddt import ddt, file_data\r\n\r\n\r\n@ddt\r\nclass ApiTest(unittest.TestCase):\r\n\r\n @classmethod\r\n def setUpClass(cls) -> None:\r\n cls.keyword = Keyword()\r\n cls.cookie = None\r\n cls.confData = LoadIni('config.ini')\r\n logger.info('----------用例开始执行----------')\r\n\r\n # 登录\r\n @file_data('../data/data.yaml')\r\n def test_1_login(self, username, password):\r\n s = requests.Session()\r\n loginUrl = self.confData.getConfig('urlConfig', 'login')\r\n\r\n data = {\r\n 'uname': username,\r\n 'upass': password,\r\n 'encode': 1\r\n }\r\n res = s.post(url=loginUrl, data=data)\r\n logger.info(res.text)\r\n cookie = dict(res.cookies)\r\n sess = jsonpath.jsonpath(cookie, '$..{0}'.format('PHPSESSION'))\r\n phpSession = 'PHP_SESSION=' + sess[0]\r\n ApiTest.cookie = phpSession\r\n logger.info('用例执行成功')\r\n\r\n\r\nif __name__ == '__main__':\r\n unittest.main()\r\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
<|reserved_special_token_0|>
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@app.route('/')
def welcome():
return jsonify({'Title': 'Welcome to hawaii weather info app',
'description':
'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'
, 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',
'/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})
@app.route('/api/v1.0/precipitation')
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)
).filter(Measurement.date >= prev_year).group_by(Measurement.date
).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=
prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
@app.errorhandler(404)
def page_not_found(e):
return (
'<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'
, 404)
@app.route('/api/v1.0/<start>', methods=['GET'])
def tobsinfo_start(start):
try:
if start:
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),
func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',
Measurement.date) >= start).one()
return (
                f'<h2> Temperature(F) information from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'
)
except:
abort(404)
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
Base.prepare(engine, reflect=True)
<|reserved_special_token_0|>
@app.route('/')
def welcome():
return jsonify({'Title': 'Welcome to hawaii weather info app',
'description':
'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'
, 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',
'/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})
@app.route('/api/v1.0/precipitation')
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)
).filter(Measurement.date >= prev_year).group_by(Measurement.date
).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=
prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
@app.errorhandler(404)
def page_not_found(e):
return (
'<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'
, 404)
@app.route('/api/v1.0/<start>', methods=['GET'])
def tobsinfo_start(start):
try:
if start:
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),
func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',
Measurement.date) >= start).one()
return (
                f'<h2> Temperature(F) information from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'
)
except:
abort(404)
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
engine = create_engine('sqlite:///hawaii.sqlite')
Base = automap_base()
Base.prepare(engine, reflect=True)
Measurement = Base.classes.measurement
Station = Base.classes.station
session = Session(engine)
app = Flask(__name__)
@app.route('/')
def welcome():
return jsonify({'Title': 'Welcome to hawaii weather info app',
'description':
'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'
, 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',
'/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})
@app.route('/api/v1.0/precipitation')
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)
).filter(Measurement.date >= prev_year).group_by(Measurement.date
).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=
prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
@app.errorhandler(404)
def page_not_found(e):
return (
'<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'
, 404)
@app.route('/api/v1.0/<start>', methods=['GET'])
def tobsinfo_start(start):
try:
if start:
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),
func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',
Measurement.date) >= start).one()
return (
                f'<h2> Temperature(F) information from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'
)
except:
abort(404)
@app.route('/api/v1.0/tobs/<startDate>/<endDate>')
def getTempObs(startDate, endDate):
"""Return the date and temperateure for 2017"""
results = session.query(Measurement.tobs).filter(Measurement.date >=
startDate).filter(Measurement.date <= endDate).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
@app.route('/api/v1.0/<startDate>/<endDate>')
@app.route('/api/v1.0/<startDate>')
def getTempStats(startDate, endDate='2018-31-12'):
"""Return temperature stats"""
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).all()
else:
results = session.query(func.min(Measurement.tobs), func.avg(
Measurement.tobs), func.max(Measurement.tobs)).filter(
Measurement.date >= startDate).filter(Measurement.date <= endDate
).all()
all_names = list(np.ravel(results))
return jsonify(all_names)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
import datetime as dt
import numpy as np
import pandas as pd
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify, render_template, abort
#creating engine to connect with hawaii sqlite database
engine = create_engine("sqlite:///hawaii.sqlite")
#using automap to load orm automatically
Base = automap_base()
# reflecting the tables from orm classes
Base.prepare(engine, reflect = True)
Measurement = Base.classes.measurement
Station = Base.classes.station
# print(Base.classes.keys())
#initiating session
session = Session(engine)
# initiating flask api
app = Flask(__name__)
@app.route('/')
def welcome():
return jsonify({"Title": "Welcome to hawaii weather info app",
"description": "This api gives you the information about Hawaii stations, precipitation and temperature in a daterange",
"endpoints":["/api/v1.0/precipitation",
"/api/v1.0/stations",
"/api/v1.0/tobs",
"/api/v1.0/<start>",
"/api/v1.0/<start>/<end>"]})
@app.route("/api/v1.0/precipitation")
def prcp():
prev_year = dt.date.today() - dt.timedelta(days=365)
# date_string = prev_year.strftime("%Y-%m-%d")
prcp_each_day = session.query(Measurement.date,func.sum(Measurement.prcp)).filter(Measurement.date >= prev_year).group_by(Measurement.date).order_by(Measurement.date).all()
prcp_dict = dict(prcp_each_day)
return jsonify(prcp_dict)
@app.route('/api/v1.0/stations')
def stations():
"""return a json list of stations from the dataset."""
stationquery = session.query(Station.station).all()
stationlist = list(np.ravel(stationquery))
return jsonify(stationlist)
@app.route('/api/v1.0/tobs')
def tobs():
"""Return a json list of Temperature Observations (tobs) for the previous year"""
prev_year = dt.date.today() - dt.timedelta(days=365)
# date_string = prev_year.strftime("%Y-%m-%d")
tobsquery = session.query(Measurement.tobs).filter(Measurement.date >= prev_year).all()
tobslist = list(np.ravel(tobsquery))
return jsonify(tobslist)
#executing the error handler page using 404 abort
@app.errorhandler(404)
def page_not_found(e):
return ("<h2> 404: Page Not Found </h2>"
"Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>"),404
@app.route('/api/v1.0/<start>', methods=["GET"])
# Return a json list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.
# When given the start only, calculate TMIN, TAVG, and TMAX for all dates greater than and equal to the start date.
# When given the start and the end date, calculate the TMIN, TAVG, and TMAX for dates between the start and end date inclusive.
def tobsinfo_start(start):
# daterange = [date for dates in session.query(Measurement.date).all()]
try:
if start:# in daterange:
# start = func.strftime('%Y-%m-%d', 'start')
sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)]
calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',Measurement.date) >= start).one()
return (
f"<h2> Temperature(F) informtion from {start} </h2>"
f"Minimum temp: {calcs[0]}<br>"
f"Average temp: {round(calcs[1],2)}<br>"
f"Maximum temp: {round(calcs[2],2)}<br>"
)
except:
abort(404)
@app.route("/api/v1.0/tobs/<startDate>/<endDate>")
def getTempObs(startDate,endDate):
"""Return the date and temperateure for 2017"""
# Query all the date and the temperature details
results = session.query(Measurement.tobs). filter(Measurement.date >= startDate).filter(Measurement.date <= endDate).all()
# Convert list of tuples into normal list
all_names = list(np.ravel(results))
return jsonify(all_names)
# 12. Get the temperature stats for given date
@app.route("/api/v1.0/<startDate>/<endDate>")
@app.route("/api/v1.0/<startDate>")
def getTempStats(startDate,endDate='2018-31-12'):
"""Return temperature stats"""
#If end date is not given
if endDate == '2018-31-12':
results = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= startDate).all()
else:
	# Query all the date and the temperature details
results = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\
filter(Measurement.date >= startDate).filter(Measurement.date <= endDate).all()
# Convert list of tuples into normal list
all_names = list(np.ravel(results))
return jsonify(all_names)
if __name__ == '__main__':
app.run(debug=True)
|
flexible
|
{
"blob_id": "c295d769b85943a6ca89f9d213e79b78129a6ce9",
"index": 2031,
"step-1": "<mask token>\n\n\[email protected]('/api/v1.0/stations')\ndef stations():\n \"\"\"return a json list of stations from the dataset.\"\"\"\n stationquery = session.query(Station.station).all()\n stationlist = list(np.ravel(stationquery))\n return jsonify(stationlist)\n\n\n<mask token>\n\n\[email protected]('/api/v1.0/tobs/<startDate>/<endDate>')\ndef getTempObs(startDate, endDate):\n \"\"\"Return the date and temperateure for 2017\"\"\"\n results = session.query(Measurement.tobs).filter(Measurement.date >=\n startDate).filter(Measurement.date <= endDate).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\[email protected]('/api/v1.0/<startDate>/<endDate>')\[email protected]('/api/v1.0/<startDate>')\ndef getTempStats(startDate, endDate='2018-31-12'):\n \"\"\"Return temperature stats\"\"\"\n if endDate == '2018-31-12':\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).all()\n else:\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).filter(Measurement.date <= endDate\n ).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\[email protected]('/')\ndef welcome():\n return jsonify({'Title': 'Welcome to hawaii weather info app',\n 'description':\n 'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'\n , 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',\n '/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})\n\n\[email protected]('/api/v1.0/precipitation')\ndef prcp():\n prev_year = dt.date.today() - dt.timedelta(days=365)\n prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)\n ).filter(Measurement.date >= prev_year).group_by(Measurement.date\n ).order_by(Measurement.date).all()\n prcp_dict = dict(prcp_each_day)\n return jsonify(prcp_dict)\n\n\[email protected]('/api/v1.0/stations')\ndef stations():\n \"\"\"return a json list of stations from the dataset.\"\"\"\n stationquery = session.query(Station.station).all()\n stationlist = list(np.ravel(stationquery))\n return jsonify(stationlist)\n\n\[email protected]('/api/v1.0/tobs')\ndef tobs():\n \"\"\"Return a json list of Temperature Observations (tobs) for the previous year\"\"\"\n prev_year = dt.date.today() - dt.timedelta(days=365)\n tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=\n prev_year).all()\n tobslist = list(np.ravel(tobsquery))\n return jsonify(tobslist)\n\n\[email protected](404)\ndef page_not_found(e):\n return (\n '<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'\n , 404)\n\n\[email protected]('/api/v1.0/<start>', methods=['GET'])\ndef tobsinfo_start(start):\n try:\n if start:\n sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),\n func.max(Measurement.tobs)]\n calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',\n Measurement.date) >= start).one()\n return (\n f'<h2> Temperature(F) informtion from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'\n )\n except:\n abort(404)\n\n\[email protected]('/api/v1.0/tobs/<startDate>/<endDate>')\ndef getTempObs(startDate, endDate):\n \"\"\"Return the date and temperateure for 2017\"\"\"\n results = session.query(Measurement.tobs).filter(Measurement.date >=\n startDate).filter(Measurement.date <= endDate).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\[email protected]('/api/v1.0/<startDate>/<endDate>')\[email protected]('/api/v1.0/<startDate>')\ndef getTempStats(startDate, endDate='2018-31-12'):\n \"\"\"Return temperature stats\"\"\"\n if endDate == '2018-31-12':\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).all()\n else:\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).filter(Measurement.date <= endDate\n ).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\n<mask token>\n",
"step-3": "<mask token>\nBase.prepare(engine, reflect=True)\n<mask token>\n\n\[email protected]('/')\ndef welcome():\n return jsonify({'Title': 'Welcome to hawaii weather info app',\n 'description':\n 'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'\n , 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',\n '/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})\n\n\[email protected]('/api/v1.0/precipitation')\ndef prcp():\n prev_year = dt.date.today() - dt.timedelta(days=365)\n prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)\n ).filter(Measurement.date >= prev_year).group_by(Measurement.date\n ).order_by(Measurement.date).all()\n prcp_dict = dict(prcp_each_day)\n return jsonify(prcp_dict)\n\n\[email protected]('/api/v1.0/stations')\ndef stations():\n \"\"\"return a json list of stations from the dataset.\"\"\"\n stationquery = session.query(Station.station).all()\n stationlist = list(np.ravel(stationquery))\n return jsonify(stationlist)\n\n\[email protected]('/api/v1.0/tobs')\ndef tobs():\n \"\"\"Return a json list of Temperature Observations (tobs) for the previous year\"\"\"\n prev_year = dt.date.today() - dt.timedelta(days=365)\n tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=\n prev_year).all()\n tobslist = list(np.ravel(tobsquery))\n return jsonify(tobslist)\n\n\[email protected](404)\ndef page_not_found(e):\n return (\n '<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'\n , 404)\n\n\[email protected]('/api/v1.0/<start>', methods=['GET'])\ndef tobsinfo_start(start):\n try:\n if start:\n sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),\n func.max(Measurement.tobs)]\n calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',\n Measurement.date) >= start).one()\n return (\n f'<h2> Temperature(F) informtion from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'\n )\n except:\n abort(404)\n\n\[email protected]('/api/v1.0/tobs/<startDate>/<endDate>')\ndef getTempObs(startDate, endDate):\n \"\"\"Return the date and temperateure for 2017\"\"\"\n results = session.query(Measurement.tobs).filter(Measurement.date >=\n startDate).filter(Measurement.date <= endDate).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\[email protected]('/api/v1.0/<startDate>/<endDate>')\[email protected]('/api/v1.0/<startDate>')\ndef getTempStats(startDate, endDate='2018-31-12'):\n \"\"\"Return temperature stats\"\"\"\n if endDate == '2018-31-12':\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).all()\n else:\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).filter(Measurement.date <= endDate\n ).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "<mask token>\nengine = create_engine('sqlite:///hawaii.sqlite')\nBase = automap_base()\nBase.prepare(engine, reflect=True)\nMeasurement = Base.classes.measurement\nStation = Base.classes.station\nsession = Session(engine)\napp = Flask(__name__)\n\n\[email protected]('/')\ndef welcome():\n return jsonify({'Title': 'Welcome to hawaii weather info app',\n 'description':\n 'This api gives you the information about Hawaii stations, precipitation and temperature in a daterange'\n , 'endpoints': ['/api/v1.0/precipitation', '/api/v1.0/stations',\n '/api/v1.0/tobs', '/api/v1.0/<start>', '/api/v1.0/<start>/<end>']})\n\n\[email protected]('/api/v1.0/precipitation')\ndef prcp():\n prev_year = dt.date.today() - dt.timedelta(days=365)\n prcp_each_day = session.query(Measurement.date, func.sum(Measurement.prcp)\n ).filter(Measurement.date >= prev_year).group_by(Measurement.date\n ).order_by(Measurement.date).all()\n prcp_dict = dict(prcp_each_day)\n return jsonify(prcp_dict)\n\n\[email protected]('/api/v1.0/stations')\ndef stations():\n \"\"\"return a json list of stations from the dataset.\"\"\"\n stationquery = session.query(Station.station).all()\n stationlist = list(np.ravel(stationquery))\n return jsonify(stationlist)\n\n\[email protected]('/api/v1.0/tobs')\ndef tobs():\n \"\"\"Return a json list of Temperature Observations (tobs) for the previous year\"\"\"\n prev_year = dt.date.today() - dt.timedelta(days=365)\n tobsquery = session.query(Measurement.tobs).filter(Measurement.date >=\n prev_year).all()\n tobslist = list(np.ravel(tobsquery))\n return jsonify(tobslist)\n\n\[email protected](404)\ndef page_not_found(e):\n return (\n '<h2> 404: Page Not Found </h2>Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>'\n , 404)\n\n\[email protected]('/api/v1.0/<start>', methods=['GET'])\ndef tobsinfo_start(start):\n try:\n if start:\n sel = [func.min(Measurement.tobs), func.avg(Measurement.tobs),\n func.max(Measurement.tobs)]\n calcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',\n Measurement.date) >= start).one()\n return (\n f'<h2> Temperature(F) informtion from {start} </h2>Minimum temp: {calcs[0]}<br>Average temp: {round(calcs[1], 2)}<br>Maximum temp: {round(calcs[2], 2)}<br>'\n )\n except:\n abort(404)\n\n\[email protected]('/api/v1.0/tobs/<startDate>/<endDate>')\ndef getTempObs(startDate, endDate):\n \"\"\"Return the date and temperateure for 2017\"\"\"\n results = session.query(Measurement.tobs).filter(Measurement.date >=\n startDate).filter(Measurement.date <= endDate).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\[email protected]('/api/v1.0/<startDate>/<endDate>')\[email protected]('/api/v1.0/<startDate>')\ndef getTempStats(startDate, endDate='2018-31-12'):\n \"\"\"Return temperature stats\"\"\"\n if endDate == '2018-31-12':\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).all()\n else:\n results = session.query(func.min(Measurement.tobs), func.avg(\n Measurement.tobs), func.max(Measurement.tobs)).filter(\n Measurement.date >= startDate).filter(Measurement.date <= endDate\n ).all()\n all_names = list(np.ravel(results))\n return jsonify(all_names)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "import datetime as dt\nimport numpy as np\nimport pandas as pd\n\nimport sqlalchemy\nfrom sqlalchemy.ext.automap import automap_base\nfrom sqlalchemy.orm import Session\nfrom sqlalchemy import create_engine, func\n\nfrom flask import Flask, jsonify, render_template, abort\n\n#creating engine to connect with hawaii sqlite database\n\t\nengine = create_engine(\"sqlite:///hawaii.sqlite\")\n\n#using automap to load orm automatically\n\nBase = automap_base()\n\n# reflecting the tables from orm classes\n\nBase.prepare(engine, reflect = True)\n\nMeasurement = Base.classes.measurement\nStation = Base.classes.station\n# print(Base.classes.keys())\n\n#initiating session\nsession = Session(engine)\n\n# initiating flask api\n\napp = Flask(__name__)\n\[email protected]('/')\ndef welcome():\n return jsonify({\"Title\": \"Welcome to hawaii weather info app\",\n \t\"description\": \"This api gives you the information about Hawaii stations, precipitation and temperature in a daterange\",\n \t\"endpoints\":[\"/api/v1.0/precipitation\",\n \t\"/api/v1.0/stations\",\n \t\"/api/v1.0/tobs\",\n \t\"/api/v1.0/<start>\",\n \t\"/api/v1.0/<start>/<end>\"]})\n\n\[email protected](\"/api/v1.0/precipitation\")\n\ndef prcp():\n\tprev_year = dt.date.today() - dt.timedelta(days=365)\n\t# date_string = prev_year.strftime(\"%Y-%m-%d\")\n\n\tprcp_each_day = session.query(Measurement.date,func.sum(Measurement.prcp)).filter(Measurement.date >= prev_year).group_by(Measurement.date).order_by(Measurement.date).all()\n\t\n\tprcp_dict = dict(prcp_each_day)\n\treturn jsonify(prcp_dict)\n\t\n\[email protected]('/api/v1.0/stations')\n\ndef stations():\n\t\"\"\"return a json list of stations from the dataset.\"\"\"\n\t\n\n\tstationquery = session.query(Station.station).all()\n\n\tstationlist = list(np.ravel(stationquery))\n\t\n\treturn jsonify(stationlist)\n\n\[email protected]('/api/v1.0/tobs')\n\ndef tobs():\n\t\"\"\"Return a json list of Temperature Observations (tobs) for the previous year\"\"\"\n\tprev_year = dt.date.today() - dt.timedelta(days=365)\n\t# date_string = prev_year.strftime(\"%Y-%m-%d\")\n\n\ttobsquery = session.query(Measurement.tobs).filter(Measurement.date >= prev_year).all()\n\n\ttobslist = list(np.ravel(tobsquery))\n\n\treturn jsonify(tobslist)\n\n#executing the error handler page using 404 abort\[email protected](404)\ndef page_not_found(e):\n \n\treturn (\"<h2> 404: Page Not Found </h2>\"\n\t\t\t\"Please enter a date in database range: <b>2010-01-01</b> to <b>2017-08-23</b>\"),404\n\n\n\[email protected]('/api/v1.0/<start>', methods=[\"GET\"])\n\n# Return a json list of the minimum temperature, the average temperature, and the max temperature for a given start or start-end range.\n# When given the start only, calculate TMIN, TAVG, and TMAX for all dates greater than and equal to the start date.\n# When given the start and the end date, calculate the TMIN, TAVG, and TMAX for dates between the start and end date inclusive.\n\n\ndef tobsinfo_start(start):\n\t\n\t# daterange = [date for dates in session.query(Measurement.date).all()]\n\ttry:\n\t\tif start:# in daterange:\n\t\t\t# start = func.strftime('%Y-%m-%d', 'start')\n\n\t\t\tsel = [func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)]\n\n\t\t\tcalcs = session.query(*sel).filter(func.strftime('%Y-%m-%d',Measurement.date) >= start).one()\n\n\n\t\t\treturn (\n\t\t\t\tf\"<h2> Temperature(F) informtion from {start} </h2>\"\n\t\t\t\tf\"Minimum temp: {calcs[0]}<br>\"\n\t\t\t\tf\"Average temp: 
{round(calcs[1],2)}<br>\"\n\t\t\t\tf\"Maximum temp: {round(calcs[2],2)}<br>\"\n\t\t\t\t)\n\texcept:\n\t\tabort(404)\n\t\t\n\n\[email protected](\"/api/v1.0/tobs/<startDate>/<endDate>\")\n\ndef getTempObs(startDate,endDate):\n\n \"\"\"Return the date and temperateure for 2017\"\"\"\n\n # Query all the date and the temperature details\n\n results = session.query(Measurement.tobs). filter(Measurement.date >= startDate).filter(Measurement.date <= endDate).all()\n\n # Convert list of tuples into normal list\n\n all_names = list(np.ravel(results))\n\n\n\n return jsonify(all_names)\n\n\n# 12. Get the temperature stats for given date\n\[email protected](\"/api/v1.0/<startDate>/<endDate>\")\n\[email protected](\"/api/v1.0/<startDate>\")\n\ndef getTempStats(startDate,endDate='2018-31-12'):\n\n \"\"\"Return temperature stats\"\"\"\n\n #If end date is not given\n\n if endDate == '2018-31-12':\n\n results = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\\\n filter(Measurement.date >= startDate).all()\n\n else: \n\n # Query all the date and the temperature details og\n\n results = session.query(func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)).\\\n filter(Measurement.date >= startDate).filter(Measurement.date <= endDate).all()\n\n # Convert list of tuples into normal list\n\n \n\n all_names = list(np.ravel(results))\n\n return jsonify(all_names)\n\nif __name__ == '__main__':\n\tapp.run(debug=True)\n",
"step-ids": [
3,
8,
9,
10,
12
]
}
|
[
3,
8,
9,
10,
12
] |