blob_id stringlengths 40 to 40 | directory_id stringlengths 40 to 40 | path stringlengths 3 to 616 | content_id stringlengths 40 to 40 | detected_licenses sequencelengths 0 to 112 | license_type stringclasses 2 values | repo_name stringlengths 5 to 115 | snapshot_id stringlengths 40 to 40 | revision_id stringlengths 40 to 40 | branch_name stringclasses 777 values | visit_date timestamp[us] 2015-08-06 10:31:46 to 2023-09-06 10:44:38 | revision_date timestamp[us] 1970-01-01 02:38:32 to 2037-05-03 13:00:00 | committer_date timestamp[us] 1970-01-01 02:38:32 to 2023-09-06 01:08:06 | github_id int64 4.92k to 681M ⌀ | star_events_count int64 0 to 209k | fork_events_count int64 0 to 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us] 2012-06-04 01:52:49 to 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us] 2008-05-22 07:58:19 to 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 to 10.2M | extension stringclasses 188 values | content stringlengths 3 to 10.2M | authors sequencelengths 1 to 1 | author_id stringlengths 1 to 132 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
20cf578ea6073fca3dfa9da09f0e4bc6c7d586bb | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnflv.py | 83031709fddd620f39b5c267f38413dd5128695d | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 44 | py | ii = [('RogePAV2.py', 1), ('KnowJMM.py', 1)] | [
"[email protected]"
] | |
309daa07bda7b7eb3525eaae9158ed897a52e6ba | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_209/611.py | 0d8528d812df07b274136b7d9c4ebae4a50cbb69 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,235 | py | t = input()
from collections import defaultdict
def solve(pancakes, n, k, dp):
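# Descriptive note (derived from the code below): solve(pancakes, n, k) returns
# the best exposed surface area, in units of pi, for a stack of n pancakes whose
# topmost pancake is pancakes[k]; the n-1 pancakes underneath are chosen from
# indices < k (wider ones, since the list is sorted by radius in descending
# order). Each pancake exposes its side area 2*r*h, and the bottom-most pancake
# additionally exposes its full top disc r**2 (the n == 1 base case).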
# print "calling for {0}, {1}".format(n, k)
if ((n, k)) in dp:
# print 'returning {0} for {1}, {2}'.format(dp[(n, k)], n, k)
return dp[(n, k)]
if n == 0:
# print 'returning {0} for {1}, {2}'.format(0, n, k)
return 0
if k < 0:
# print 'returning {0} for {1}, {2}'.format(9876543211111111, n, k)
return -9876543211111111
if n == 1:
p = pancakes[k]
dp[(n, k)] = p[0]**2 + 2*p[0]*p[1]
# print p[0]**2 + 2*p[0]*p[1]
# print 'returning {0} for {1}, {2}'.format(p[0]**2 + p[0]*p[1], n, k)
return p[0]**2 + 2*p[0]*p[1]
max_ = -1
for cand in xrange(0, k):
max_ = max(max_, solve(pancakes, n-1, cand, dp))
p = pancakes[k]
dp[(n, k)] = max_ + 2*p[0]*p[1]
# print 'returning {0} for {1}, {2}'.format(dp[(n, k)], n, k)
return dp[(n, k)]
for idx in xrange(1, t + 1):
n, k = map(int, raw_input().split())
pancakes = []
for _ in xrange(n):
a, b = map(int, raw_input().split())
pancakes.append((a, b))
dp = defaultdict(int)
pancakes.sort(reverse=True)
# print pancakes
best = -1
for i in xrange(0, n):
best = max(best, solve(pancakes, k, i, dp))
print "Case #{0}: ".format(idx) + "%.10f" % (best*3.14159265359)
| [
"[email protected]"
] | |
cda7cfa1c3424a8cf95d97f5dc12e578a7e2e2a3 | 72d010d00355fc977a291c29eb18aeb385b8a9b0 | /BeatStep/__init__.py | 49897318ae58c967be09a28d7820359edcc8d56d | [] | no_license | maratbakirov/AbletonLive10_MIDIRemoteScripts | bf0749c5c4cce8e83b23f14f671e52752702539d | ed1174d9959b20ed05fb099f0461bbc006bfbb79 | refs/heads/master | 2021-06-16T19:58:34.038163 | 2021-05-09T11:46:46 | 2021-05-09T11:46:46 | 203,174,328 | 0 | 0 | null | 2019-08-19T13:04:23 | 2019-08-19T13:04:22 | null | UTF-8 | Python | false | false | 761 | py | # Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/BeatStep/__init__.py
# Compiled at: 2018-04-23 20:27:04
from __future__ import absolute_import, print_function, unicode_literals
import _Framework.Capabilities as caps
from .BeatStep import BeatStep
def get_capabilities():
return {caps.CONTROLLER_ID_KEY: caps.controller_id(vendor_id=7285, product_ids=[
518], model_name=[
'Arturia BeatStep']),
caps.PORTS_KEY: [
caps.inport(props=[caps.NOTES_CC, caps.SCRIPT, caps.REMOTE]),
caps.outport(props=[caps.SCRIPT])]}
def create_instance(c_instance):
return BeatStep(c_instance)
| [
"[email protected]"
] | |
67fc5124fecf25cc232eeef083e19524746f2a48 | 668ff51f34eac31931511ec3641c66f82bffaee5 | /myewb/contrib/haystack/backends/solr_backend.py | a93c5e0b73ec7fc31132bae044f0e8a33ca81be7 | [] | no_license | ewbcanada/myewb2 | 4a81668b37b286638ad80c9f2535770ada200cfc | 50cf7698899bed1b31d0a637f72a1e9a5b7fa07a | refs/heads/master | 2020-12-01T01:15:09.592117 | 2011-10-18T22:02:07 | 2011-10-18T22:02:07 | 380,073 | 3 | 6 | null | null | null | null | UTF-8 | Python | false | false | 19,198 | py | import logging
import sys
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_model
from haystack.backends import BaseSearchBackend, BaseSearchQuery, log_query, EmptyResults
from haystack.constants import ID, DJANGO_CT, DJANGO_ID
from haystack.exceptions import MissingDependency, MoreLikeThisError
from haystack.models import SearchResult
from haystack.utils import get_identifier
try:
from django.db.models.sql.query import get_proxied_model
except ImportError:
# Likely on Django 1.0
get_proxied_model = None
try:
from pysolr import Solr, SolrError
except ImportError:
raise MissingDependency("The 'solr' backend requires the installation of 'pysolr'. Please refer to the documentation.")
BACKEND_NAME = 'solr'
class SearchBackend(BaseSearchBackend):
    # Words reserved by Solr for special use.
RESERVED_WORDS = (
'AND',
'NOT',
'OR',
'TO',
)
# Characters reserved by Solr for special use.
# The '\\' must come first, so as not to overwrite the other slash replacements.
RESERVED_CHARACTERS = (
'\\', '+', '-', '&&', '||', '!', '(', ')', '{', '}',
'[', ']', '^', '"', '~', '*', '?', ':',
)
def __init__(self, site=None):
super(SearchBackend, self).__init__(site)
if not hasattr(settings, 'HAYSTACK_SOLR_URL'):
raise ImproperlyConfigured('You must specify a HAYSTACK_SOLR_URL in your settings.')
timeout = getattr(settings, 'HAYSTACK_SOLR_TIMEOUT', 10)
self.conn = Solr(settings.HAYSTACK_SOLR_URL, timeout=timeout)
self.log = logging.getLogger('haystack')
def update(self, index, iterable, commit=True):
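# Prepare every object through its SearchIndex and send the resulting
# documents to Solr in a single add() call, applying per-field boosts.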
docs = []
try:
for obj in iterable:
docs.append(index.full_prepare(obj))
except UnicodeDecodeError:
sys.stderr.write("Chunk failed.\n")
if len(docs) > 0:
try:
self.conn.add(docs, commit=commit, boost=index.get_field_weights())
except (IOError, SolrError), e:
self.log.error("Failed to add documents to Solr: %s", e)
def remove(self, obj_or_string, commit=True):
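# Delete a single document from Solr, keyed by its Haystack identifier
# (app_label.model_name.pk).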
solr_id = get_identifier(obj_or_string)
try:
kwargs = {
'commit': commit,
ID: solr_id
}
self.conn.delete(**kwargs)
except (IOError, SolrError), e:
self.log.error("Failed to remove document '%s' from Solr: %s", solr_id, e)
def clear(self, models=[], commit=True):
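# With no models given, wipe the entire index ('*:*' matches every document);
# otherwise delete only the documents belonging to the listed models. An
# optimize pass runs afterwards in both cases.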
try:
if not models:
# *:* matches all docs in Solr
self.conn.delete(q='*:*', commit=commit)
else:
models_to_delete = []
for model in models:
models_to_delete.append("%s:%s.%s" % (DJANGO_CT, model._meta.app_label, model._meta.module_name))
self.conn.delete(q=" OR ".join(models_to_delete), commit=commit)
# Run an optimize post-clear. http://wiki.apache.org/solr/FAQ#head-9aafb5d8dff5308e8ea4fcf4b71f19f029c4bb99
self.conn.optimize()
except (IOError, SolrError), e:
if len(models):
self.log.error("Failed to clear Solr index of models '%s': %s", ','.join(models_to_delete), e)
else:
self.log.error("Failed to clear Solr index: %s", e)
@log_query
def search(self, query_string, sort_by=None, start_offset=0, end_offset=None,
fields='', highlight=False, facets=None, date_facets=None, query_facets=None,
narrow_queries=None, spelling_query=None,
limit_to_registered_models=None, result_class=None, **kwargs):
if len(query_string) == 0:
return {
'results': [],
'hits': 0,
}
kwargs = {
'fl': '* score',
}
if fields:
kwargs['fl'] = fields
if sort_by is not None:
kwargs['sort'] = sort_by
if start_offset is not None:
kwargs['start'] = start_offset
if end_offset is not None:
kwargs['rows'] = end_offset - start_offset
if highlight is True:
kwargs['hl'] = 'true'
kwargs['hl.fragsize'] = '200'
if getattr(settings, 'HAYSTACK_INCLUDE_SPELLING', False) is True:
kwargs['spellcheck'] = 'true'
kwargs['spellcheck.collate'] = 'true'
kwargs['spellcheck.count'] = 1
if spelling_query:
kwargs['spellcheck.q'] = spelling_query
if facets is not None:
kwargs['facet'] = 'on'
kwargs['facet.field'] = facets
if date_facets is not None:
kwargs['facet'] = 'on'
kwargs['facet.date'] = date_facets.keys()
kwargs['facet.date.other'] = 'none'
for key, value in date_facets.items():
kwargs["f.%s.facet.date.start" % key] = self.conn._from_python(value.get('start_date'))
kwargs["f.%s.facet.date.end" % key] = self.conn._from_python(value.get('end_date'))
gap_by_string = value.get('gap_by').upper()
gap_string = "%d%s" % (value.get('gap_amount'), gap_by_string)
if value.get('gap_amount') != 1:
gap_string += "S"
kwargs["f.%s.facet.date.gap" % key] = '+%s/%s' % (gap_string, gap_by_string)
if query_facets is not None:
kwargs['facet'] = 'on'
kwargs['facet.query'] = ["%s:%s" % (field, value) for field, value in query_facets]
if limit_to_registered_models is None:
limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
if limit_to_registered_models:
# Using narrow queries, limit the results to only models registered
# with the current site.
if narrow_queries is None:
narrow_queries = set()
registered_models = self.build_registered_models_list()
if len(registered_models) > 0:
narrow_queries.add('%s:(%s)' % (DJANGO_CT, ' OR '.join(registered_models)))
if narrow_queries is not None:
kwargs['fq'] = list(narrow_queries)
try:
raw_results = self.conn.search(query_string, **kwargs)
except (IOError, SolrError), e:
self.log.error("Failed to query Solr using '%s': %s", query_string, e)
raw_results = EmptyResults()
return self._process_results(raw_results, highlight=highlight, result_class=result_class)
def more_like_this(self, model_instance, additional_query_string=None,
start_offset=0, end_offset=None,
limit_to_registered_models=None, result_class=None, **kwargs):
# Handle deferred models.
if get_proxied_model and hasattr(model_instance, '_deferred') and model_instance._deferred:
model_klass = get_proxied_model(model_instance._meta)
else:
model_klass = type(model_instance)
index = self.site.get_index(model_klass)
field_name = index.get_content_field()
params = {
'fl': '*,score',
}
if start_offset is not None:
params['start'] = start_offset
if end_offset is not None:
params['rows'] = end_offset
narrow_queries = set()
if limit_to_registered_models is None:
limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
if limit_to_registered_models:
# Using narrow queries, limit the results to only models registered
# with the current site.
if narrow_queries is None:
narrow_queries = set()
registered_models = self.build_registered_models_list()
if len(registered_models) > 0:
narrow_queries.add('%s:(%s)' % (DJANGO_CT, ' OR '.join(registered_models)))
if additional_query_string:
narrow_queries.add(additional_query_string)
if narrow_queries:
params['fq'] = list(narrow_queries)
query = "%s:%s" % (ID, get_identifier(model_instance))
try:
raw_results = self.conn.more_like_this(query, field_name, **params)
except (IOError, SolrError), e:
self.log.error("Failed to fetch More Like This from Solr for document '%s': %s", query, e)
raw_results = EmptyResults()
return self._process_results(raw_results, result_class=result_class)
def _process_results(self, raw_results, highlight=False, result_class=None):
if not self.site:
from haystack import site
else:
site = self.site
results = []
hits = raw_results.hits
facets = {}
spelling_suggestion = None
if result_class is None:
result_class = SearchResult
if hasattr(raw_results, 'facets'):
facets = {
'fields': raw_results.facets.get('facet_fields', {}),
'dates': raw_results.facets.get('facet_dates', {}),
'queries': raw_results.facets.get('facet_queries', {}),
}
for key in ['fields']:
for facet_field in facets[key]:
# Convert to a two-tuple, as Solr's json format returns a list of
# pairs.
facets[key][facet_field] = zip(facets[key][facet_field][::2], facets[key][facet_field][1::2])
if getattr(settings, 'HAYSTACK_INCLUDE_SPELLING', False) is True:
if hasattr(raw_results, 'spellcheck'):
if len(raw_results.spellcheck.get('suggestions', [])):
# For some reason, it's an array of pairs. Pull off the
# collated result from the end.
spelling_suggestion = raw_results.spellcheck.get('suggestions')[-1]
indexed_models = site.get_indexed_models()
for raw_result in raw_results.docs:
app_label, model_name = raw_result[DJANGO_CT].split('.')
additional_fields = {}
model = get_model(app_label, model_name)
if model and model in indexed_models:
for key, value in raw_result.items():
index = site.get_index(model)
string_key = str(key)
if string_key in index.fields and hasattr(index.fields[string_key], 'convert'):
additional_fields[string_key] = index.fields[string_key].convert(value)
else:
additional_fields[string_key] = self.conn._to_python(value)
del(additional_fields[DJANGO_CT])
del(additional_fields[DJANGO_ID])
del(additional_fields['score'])
if raw_result[ID] in getattr(raw_results, 'highlighting', {}):
additional_fields['highlighted'] = raw_results.highlighting[raw_result[ID]]
result = result_class(app_label, model_name, raw_result[DJANGO_ID], raw_result['score'], searchsite=self.site, **additional_fields)
results.append(result)
else:
hits -= 1
return {
'results': results,
'hits': hits,
'facets': facets,
'spelling_suggestion': spelling_suggestion,
}
def build_schema(self, fields):
content_field_name = ''
schema_fields = []
for field_name, field_class in fields.items():
field_data = {
'field_name': field_class.index_fieldname,
'type': 'text',
'indexed': 'true',
'stored': 'true',
'multi_valued': 'false',
}
if field_class.document is True:
content_field_name = field_class.index_fieldname
# DRL_FIXME: Perhaps move to something where, if none of these
# checks succeed, call a custom method on the form that
# returns, per-backend, the right type of storage?
if field_class.field_type in ['date', 'datetime']:
field_data['type'] = 'date'
elif field_class.field_type == 'integer':
field_data['type'] = 'slong'
elif field_class.field_type == 'float':
field_data['type'] = 'sfloat'
elif field_class.field_type == 'boolean':
field_data['type'] = 'boolean'
elif field_class.field_type == 'ngram':
field_data['type'] = 'ngram'
elif field_class.field_type == 'edge_ngram':
field_data['type'] = 'edge_ngram'
if field_class.is_multivalued:
field_data['multi_valued'] = 'true'
if field_class.stored is False:
field_data['stored'] = 'false'
# Do this last to override `text` fields.
if field_class.indexed is False:
field_data['indexed'] = 'false'
# If it's text and not being indexed, we probably don't want
# to do the normal lowercase/tokenize/stemming/etc. dance.
if field_data['type'] == 'text':
field_data['type'] = 'string'
# If it's a ``FacetField``, make sure we don't postprocess it.
if hasattr(field_class, 'facet_for'):
# If it's text, it ought to be a string.
if field_data['type'] == 'text':
field_data['type'] = 'string'
schema_fields.append(field_data)
return (content_field_name, schema_fields)
class SearchQuery(BaseSearchQuery):
def __init__(self, site=None, backend=None):
super(SearchQuery, self).__init__(site, backend)
if backend is not None:
self.backend = backend
else:
self.backend = SearchBackend(site=site)
def matching_all_fragment(self):
return '*:*'
def build_query_fragment(self, field, filter_type, value):
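# Translate a single (field, filter_type, value) lookup into Solr query
# syntax, e.g. an 'lte' lookup becomes "fieldname:[* TO value]" and an 'in'
# lookup becomes an OR-ed group of exact matches.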
result = ''
# Handle when we've got a ``ValuesListQuerySet``...
if hasattr(value, 'values_list'):
value = list(value)
if not isinstance(value, (set, list, tuple)):
# Convert whatever we find to what pysolr wants.
value = self.backend.conn._from_python(value)
# Check to see if it's a phrase for an exact match.
if ' ' in value:
value = '"%s"' % value
index_fieldname = self.backend.site.get_index_fieldname(field)
# 'content' is a special reserved word, much like 'pk' in
# Django's ORM layer. It indicates 'no special field'.
if field == 'content':
result = value
else:
filter_types = {
'exact': "%s:%s",
'gt': "%s:{%s TO *}",
'gte': "%s:[%s TO *]",
'lt': "%s:{* TO %s}",
'lte': "%s:[* TO %s]",
'startswith': "%s:%s*",
}
if filter_type == 'in':
in_options = []
for possible_value in value:
in_options.append('%s:"%s"' % (index_fieldname, self.backend.conn._from_python(possible_value)))
result = "(%s)" % " OR ".join(in_options)
elif filter_type == 'range':
start = self.backend.conn._from_python(value[0])
end = self.backend.conn._from_python(value[1])
return "%s:[%s TO %s]" % (index_fieldname, start, end)
else:
result = filter_types[filter_type] % (index_fieldname, value)
return result
def run(self, spelling_query=None):
"""Builds and executes the query. Returns a list of search results."""
final_query = self.build_query()
kwargs = {
'start_offset': self.start_offset,
'result_class': self.result_class,
}
if self.order_by:
order_by_list = []
for order_by in self.order_by:
if order_by.startswith('-'):
order_by_list.append('%s desc' % order_by[1:])
else:
order_by_list.append('%s asc' % order_by)
kwargs['sort_by'] = ", ".join(order_by_list)
if self.end_offset is not None:
kwargs['end_offset'] = self.end_offset
if self.highlight:
kwargs['highlight'] = self.highlight
if self.facets:
kwargs['facets'] = list(self.facets)
if self.date_facets:
kwargs['date_facets'] = self.date_facets
if self.query_facets:
kwargs['query_facets'] = self.query_facets
if self.narrow_queries:
kwargs['narrow_queries'] = self.narrow_queries
if spelling_query:
kwargs['spelling_query'] = spelling_query
results = self.backend.search(final_query, **kwargs)
self._results = results.get('results', [])
self._hit_count = results.get('hits', 0)
self._facet_counts = self.post_process_facets(results)
self._spelling_suggestion = results.get('spelling_suggestion', None)
def run_mlt(self):
"""Builds and executes the query. Returns a list of search results."""
if self._more_like_this is False or self._mlt_instance is None:
raise MoreLikeThisError("No instance was provided to determine 'More Like This' results.")
additional_query_string = self.build_query()
kwargs = {
'start_offset': self.start_offset,
'result_class': self.result_class,
}
if self.end_offset is not None:
kwargs['end_offset'] = self.end_offset - self.start_offset
results = self.backend.more_like_this(self._mlt_instance, additional_query_string, **kwargs)
self._results = results.get('results', [])
self._hit_count = results.get('hits', 0)
| [
"[email protected]"
] | |
e92d33a3da585f26e3b3a9469af88fc980b959e5 | f3b233e5053e28fa95c549017bd75a30456eb50c | /p38a_input/L3FQ/3FQ-2Z_MD_NVT_rerun/set_1ns_equi_2.py | 161c95b5097a3036c69d374da8142c6f04ce088a | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | import os
dir = '/mnt/scratch/songlin3/run/p38a/L3FQ/MD_NVT_rerun/ti_one-step/3FQ_2Z/'
filesdir = dir + 'files/'
temp_equiin = filesdir + 'temp_equi_2.in'
temp_pbs = filesdir + 'temp_1ns_equi_2.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
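# For each lambda window of the thermodynamic-integration run, copy the
# equilibration input and PBS templates into that window's directory and
# substitute the lambda value for the XXX placeholder via sed.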
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#equiin
eqin = workdir + "%6.5f_equi_2.in" %(j)
os.system("cp %s %s" %(temp_equiin, eqin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, eqin))
#PBS
pbs = workdir + "%6.5f_1ns_equi_2.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"[email protected]"
] | |
42cf064d1f9437f636bbc909d434fb6771d510ca | dd3b8bd6c9f6f1d9f207678b101eff93b032b0f0 | /basis/AbletonLive10.1_MIDIRemoteScripts/Push2/browser_list.py | 6c9e839bb69da206779694dbbdc57b2e2d1b4c9d | [] | no_license | jhlax/les | 62955f57c33299ebfc4fca8d0482b30ee97adfe7 | d865478bf02778e509e61370174a450104d20a28 | refs/heads/master | 2023-08-17T17:24:44.297302 | 2019-12-15T08:13:29 | 2019-12-15T08:13:29 | 228,120,861 | 3 | 0 | null | 2023-08-03T16:40:44 | 2019-12-15T03:02:27 | Python | UTF-8 | Python | false | false | 3,765 | py | # uncompyle6 version 3.4.1
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.16 (v2.7.16:413a49145e, Mar 2 2019, 14:32:10)
# [GCC 4.2.1 Compatible Apple LLVM 6.0 (clang-600.0.57)]
# Embedded file name: /Users/versonator/Jenkins/live/output/mac_64_static/Release/python-bundle/MIDI Remote Scripts/Push2/browser_list.py
# Compiled at: 2019-04-09 19:23:44
from __future__ import absolute_import, print_function, unicode_literals
import Live
from itertools import islice
from ableton.v2.base import EventObject, listenable_property, clamp, nop
from .model.uniqueid import UniqueIdMixin
class BrowserList(EventObject, UniqueIdMixin):
LAZY_ACCESS_COUNT = 1000
LAZY_ACCESS_THRESHOLD = LAZY_ACCESS_COUNT - 100
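# Only the first LAZY_ACCESS_COUNT items are materialized eagerly; once the
# selection crosses LAZY_ACCESS_THRESHOLD, access_all is switched on (see the
# selected_index setter) and the remaining items are pulled from the iterator.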
def __init__(self, item_iterator=None, item_wrapper=nop, limit=-1, *a, **k):
super(BrowserList, self).__init__(*a, **k)
self._selected_index = -1
self._item_iterator = item_iterator
self._item_wrapper = item_wrapper
self._limit = limit
self._access_all = False
self._items = []
self._update_items()
assert self.LAZY_ACCESS_COUNT > self.LAZY_ACCESS_THRESHOLD
def _get_limit(self):
return self._limit
def _set_limit(self, value):
if value != self._limit:
self._limit = value
self._access_all = False
self._update_items()
self.notify_items()
if value != -1:
self.selected_index = -1
limit = property(_get_limit, _set_limit)
def _get_access_all(self):
return self._access_all
def _set_access_all(self, access_all):
if self._access_all != access_all:
self._access_all = access_all
self._limit = -1
self._update_items()
self.notify_items()
access_all = property(_get_access_all, _set_access_all)
@listenable_property
def items(self):
if self.limit > 0:
return self._items[:self.limit]
if not self._access_all:
return self._items[:self.LAZY_ACCESS_COUNT]
return self._items
def _update_items(self):
if isinstance(self._item_iterator, Live.Browser.BrowserItemIterator):
if self.limit > 0 and len(self._items) < self.limit:
next_slice = islice(self._item_iterator, self.limit)
elif not self._access_all and len(self._items) < self.LAZY_ACCESS_COUNT:
next_slice = islice(self._item_iterator, self.LAZY_ACCESS_COUNT - len(self._items))
else:
next_slice = self._item_iterator
self._items.extend(map(self._item_wrapper, next_slice))
elif len(self._items) < len(self._item_iterator):
self._items = map(self._item_wrapper, self._item_iterator)
@property
def selected_item(self):
if self.selected_index == -1:
return None
else:
return self.items[self.selected_index]
@listenable_property
def selected_index(self):
return self._selected_index
@selected_index.setter
def selected_index(self, value):
if value != self._selected_index:
assert value == -1 or self._limit == -1
num_children = len(self._items)
if value < -1 or value >= num_children:
raise IndexError('Index %i must be in [-1..%i]' % (value, num_children - 1))
self._selected_index = value
self.notify_selected_index()
if self._selected_index >= self.LAZY_ACCESS_THRESHOLD and not self._access_all:
self.access_all = True
def select_index_with_offset(self, offset):
self.selected_index = clamp(self._selected_index + offset, 0, len(self._items) - 1) | [
"[email protected]"
] | |
1fadf1c096fcdb36625f731a3599aee37afaedfa | 8b321ef16f11701f66898cb5b0e186bed50aaf46 | /6/3_Gems_admin_page/Gems/urls.py | 0509da185d5d17d740ef0167d6c3f10070c07a5e | [] | no_license | TrellixVulnTeam/django_H5IO | 6bed043acfda0e3373fe5b352805f1c11b038e2a | 1ee5407e93f1bbbaeb12b90b7c5d7b96e3ba13e2 | refs/heads/master | 2023-03-16T05:25:59.900947 | 2017-04-18T08:21:47 | 2017-04-18T08:21:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | """Gems URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from mainApp.views import *
from userManagementApp.views import *
from adminApp.views import *
urlpatterns = [
url(r'^$', main),
]
urlpatterns += [
url(r'^user/login/$', login),
url(r'^user/logout/$', logout),
# url(r'^user/registration/$', registration_low),
url(r'^user/registration/$', registration),
url(r'^admin/$', admin_page),
url(r'^admin/delete/user/(\d+)$', delete_user),
]
# Данный подход нерекомендуется, и будет убран в django 1.10
# urlpatterns = patterns('mainApp.views',
# url(r'^$', 'main'),
# )
| [
"[email protected]"
] | |
9a1e3984821d93a2119fe1e81d2bbc7589548289 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/hcipsec/entity.py | bdce01d6f76c3216322d49e73d93fa048d3e5e28 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,859 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class Entity(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.hcipsec.Entity")
meta.moClassName = "hcipsecEntity"
meta.rnFormat = "ipsec"
meta.category = MoCategory.REGULAR
meta.label = "Entity"
meta.writeAccessMask = 0x8021002001
meta.readAccessMask = 0x8021002001
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = True
meta.isContextRoot = False
meta.childClasses.add("cobra.model.tag.Tag")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.aaa.RbacAnnotation")
meta.childClasses.add("cobra.model.hcipsec.Inst")
meta.childClasses.add("cobra.model.tag.Annotation")
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Annotation", "annotationKey-"))
meta.childNamesAndRnPrefix.append(("cobra.model.aaa.RbacAnnotation", "rbacDom-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Tag", "tagKey-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.hcipsec.Inst", "inst"))
meta.parentClasses.add("cobra.model.hcloud.Csr")
meta.rnPrefixes = [
('ipsec', False),
]
prop = PropMeta("str", "annotation", "annotation", 51604, PropCategory.REGULAR)
prop.label = "Annotation. Suggested format orchestrator:value"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("annotation", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "extMngdBy", "extMngdBy", 51605, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "undefined"
prop._addConstant("msc", "msc", 1)
prop._addConstant("undefined", "undefined", 0)
meta.props.add("extMngdBy", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "uid", "uid", 8, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("uid", prop)
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcbgpPeer", "From hcloudCsr to hcbgpPeer", "cobra.model.hcbgp.Peer"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcl3LoopbackRoutedIf", "From hcloudCsr to hcl3LoopbackRoutedIf", "cobra.model.hcl3.LoopbackRoutedIf"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcospfRsTunnIf", "From hcloudCsr to hcospfRsTunnIf", "cobra.model.hcospf.RsTunnIf"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHctunnIf", "From hcloudCsr to hctunnIf", "cobra.model.hctunn.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcloudNetworkInterface", "From hcloudCsr to hcloudNetworkInterface", "cobra.model.hcloud.NetworkInterface"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcloudEndPointOper", "From hcloudCsr to hcloudEndPointOper", "cobra.model.hcloud.EndPointOper"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcl1PhysicalIf", "From hcloudCsr to hcl1PhysicalIf", "cobra.model.hcl1.PhysicalIf"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToCloudCtxProfile", "From hcloudCsr to cloudCtxProfile", "cobra.model.cloud.CtxProfile"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcl3Vrf", "From hcloudCsr to hcl3Vrf", "cobra.model.hcl3.Vrf"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("HcloudCsrToHcloudInstanceOper", "From hcloudCsr to hcloudInstanceOper", "cobra.model.hcloud.InstanceOper"))
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
f6160ac2811a35c0f86566abde0bb2c6d5321a80 | 2a6f1afa7678e5d76efe01b1474eda59d442ae0f | /venv/Lib/site-packages/jesse/__init__.py | b199a71f6bf197fa4922b5ebba279d9913858687 | [] | no_license | cagridincel/CagriTrade | 6b50c785efc3eb43487724be59511a5850a92145 | 86839e6604eb18850f6410acf5f6993da59b74ec | refs/heads/master | 2023-03-03T09:16:29.965177 | 2021-02-16T13:01:18 | 2021-02-16T13:01:18 | 338,672,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,925 | py | import os
import sys
from pydoc import locate
import click
import pkg_resources
import jesse.helpers as jh
# Hide the "FutureWarning: pandas.util.testing is deprecated." caused by empyrical
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
# Python version validation.
if jh.python_version() < 3.7:
print(
jh.color(
'Jesse requires Python version above 3.7. Yours is {}'.format(jh.python_version()),
'red'
)
)
# fix directory issue
sys.path.insert(0, os.getcwd())
ls = os.listdir('.')
is_jesse_project = 'strategies' in ls and 'config.py' in ls and 'storage' in ls and 'routes.py' in ls
def validate_cwd():
"""
make sure we're in a Jesse project
"""
if not is_jesse_project:
print(
jh.color(
'Current directory is not a Jesse project. You must run commands from the root of a Jesse project.',
'red'
)
)
os._exit(1)
def inject_local_config():
"""
injects config from local config file
"""
local_config = locate('config.config')
from jesse.config import set_config
set_config(local_config)
def inject_local_routes():
"""
injects routes from local routes folder
"""
local_router = locate('routes')
from jesse.routes import router
router.set_routes(local_router.routes)
router.set_extra_candles(local_router.extra_candles)
# inject local files
if is_jesse_project:
inject_local_config()
inject_local_routes()
def register_custom_exception_handler():
"""
:return:
"""
import sys
import threading
import traceback
import logging
from jesse.services import logger as jesse_logger
import click
from jesse import exceptions
log_format = "%(message)s"
os.makedirs('storage/logs', exist_ok=True)
if jh.is_livetrading():
logging.basicConfig(filename='storage/logs/live-trade.txt', level=logging.INFO,
filemode='w', format=log_format)
elif jh.is_paper_trading():
logging.basicConfig(filename='storage/logs/paper-trade.txt', level=logging.INFO,
filemode='w',
format=log_format)
elif jh.is_collecting_data():
logging.basicConfig(filename='storage/logs/collect.txt', level=logging.INFO, filemode='w',
format=log_format)
elif jh.is_optimizing():
logging.basicConfig(filename='storage/logs/optimize.txt', level=logging.INFO, filemode='w',
format=log_format)
else:
logging.basicConfig(level=logging.INFO)
# main thread
def handle_exception(exc_type, exc_value, exc_traceback):
"""
:param exc_type:
:param exc_value:
:param exc_traceback:
:return:
"""
if issubclass(exc_type, KeyboardInterrupt):
sys.excepthook(exc_type, exc_value, exc_traceback)
return
# handle Breaking exceptions
if exc_type in [
exceptions.InvalidConfig, exceptions.RouteNotFound, exceptions.InvalidRoutes,
exceptions.CandleNotFoundInDatabase
]:
click.clear()
print('=' * 30 + ' EXCEPTION TRACEBACK:')
traceback.print_tb(exc_traceback, file=sys.stdout)
print("=" * 73)
print(
'\n',
jh.color('Uncaught Exception:', 'red'),
jh.color('{}: {}'.format(exc_type.__name__, exc_value), 'yellow')
)
return
# send notifications if it's a live session
if jh.is_live():
jesse_logger.error(
'{}: {}'.format(exc_type.__name__, exc_value)
)
if jh.is_live() or jh.is_collecting_data():
logging.error("Uncaught Exception:", exc_info=(exc_type, exc_value, exc_traceback))
else:
print('=' * 30 + ' EXCEPTION TRACEBACK:')
traceback.print_tb(exc_traceback, file=sys.stdout)
print("=" * 73)
print(
'\n',
jh.color('Uncaught Exception:', 'red'),
jh.color('{}: {}'.format(exc_type.__name__, exc_value), 'yellow')
)
if jh.is_paper_trading():
print(
jh.color(
'An uncaught exception was raised. Check the log file at:\n{}'.format(
'storage/logs/paper-trade.txt'
),
'red'
)
)
elif jh.is_livetrading():
print(
jh.color(
'An uncaught exception was raised. Check the log file at:\n{}'.format(
'storage/logs/live-trade.txt'
),
'red'
)
)
elif jh.is_collecting_data():
print(
jh.color(
'An uncaught exception was raised. Check the log file at:\n{}'.format(
'storage/logs/collect.txt'
),
'red'
)
)
sys.excepthook = handle_exception
# other threads
if jh.python_version() >= 3.8:
def handle_thread_exception(args):
"""
:param args:
:return:
"""
if args.exc_type == SystemExit:
return
# handle Breaking exceptions
if args.exc_type in [
exceptions.InvalidConfig, exceptions.RouteNotFound, exceptions.InvalidRoutes,
exceptions.CandleNotFoundInDatabase
]:
click.clear()
print('=' * 30 + ' EXCEPTION TRACEBACK:')
traceback.print_tb(args.exc_traceback, file=sys.stdout)
print("=" * 73)
print(
'\n',
jh.color('Uncaught Exception:', 'red'),
jh.color('{}: {}'.format(args.exc_type.__name__, args.exc_value), 'yellow')
)
return
# send notifications if it's a live session
if jh.is_live():
jesse_logger.error(
'{}: {}'.format(args.exc_type.__name__, args.exc_value)
)
if jh.is_live() or jh.is_collecting_data():
logging.error("Uncaught Exception:",
exc_info=(args.exc_type, args.exc_value, args.exc_traceback))
else:
print('=' * 30 + ' EXCEPTION TRACEBACK:')
traceback.print_tb(args.exc_traceback, file=sys.stdout)
print("=" * 73)
print(
'\n',
jh.color('Uncaught Exception:', 'red'),
jh.color('{}: {}'.format(args.exc_type.__name__, args.exc_value), 'yellow')
)
if jh.is_paper_trading():
print(
jh.color(
'An uncaught exception was raised. Check the log file at:\n{}'.format(
'storage/logs/paper-trade.txt'
),
'red'
)
)
elif jh.is_livetrading():
print(
jh.color(
'An uncaught exception was raised. Check the log file at:\n{}'.format(
'storage/logs/live-trade.txt'
),
'red'
)
)
elif jh.is_collecting_data():
print(
jh.color(
'An uncaught exception was raised. Check the log file at:\n{}'.format(
'storage/logs/collect.txt'
),
'red'
)
)
threading.excepthook = handle_thread_exception
# create a Click group
@click.group()
@click.version_option(pkg_resources.get_distribution("jesse").version)
def cli():
pass
@cli.command()
@click.argument('exchange', required=True, type=str)
@click.argument('symbol', required=True, type=str)
@click.argument('start_date', required=True, type=str)
@click.option('--skip-confirmation', is_flag=True,
help="Will prevent confirmation for skipping duplicates")
def import_candles(exchange, symbol, start_date, skip_confirmation):
"""
imports historical candles from exchange
"""
validate_cwd()
from jesse.config import config
config['app']['trading_mode'] = 'import-candles'
register_custom_exception_handler()
from jesse.services import db
from jesse.modes import import_candles_mode
import_candles_mode.run(exchange, symbol, start_date, skip_confirmation)
db.close_connection()
@cli.command()
@click.argument('start_date', required=True, type=str)
@click.argument('finish_date', required=True, type=str)
@click.option('--debug/--no-debug', default=False,
help='Displays logging messages instead of the progressbar. Used for debugging your strategy.')
@click.option('--csv/--no-csv', default=False,
help='Outputs a CSV file of all executed trades on completion.')
@click.option('--json/--no-json', default=False,
help='Outputs a JSON file of all executed trades on completion.')
@click.option('--fee/--no-fee', default=True,
help='You can use "--no-fee" as a quick way to set trading fee to zero.')
@click.option('--chart/--no-chart', default=False,
help='Generates charts of daily portfolio balance and asset price changes. Useful for a visual comparison of your portfolio against the market.')
@click.option('--tradingview/--no-tradingview', default=False,
help="Generates an output that can be copy-and-pasted into tradingview.com's pine-editor too see the trades in their charts.")
def backtest(start_date, finish_date, debug, csv, json, fee, chart, tradingview):
"""
backtest mode. Enter in "YYYY-MM-DD" "YYYY-MM-DD"
"""
validate_cwd()
from jesse.config import config
config['app']['trading_mode'] = 'backtest'
register_custom_exception_handler()
from jesse.services import db
from jesse.modes import backtest_mode
from jesse.services.selectors import get_exchange
# debug flag
config['app']['debug_mode'] = debug
# fee flag
if not fee:
for e in config['app']['trading_exchanges']:
config['env']['exchanges'][e]['fee'] = 0
get_exchange(e).fee = 0
backtest_mode.run(start_date, finish_date, chart=chart, tradingview=tradingview, csv=csv,
json=json)
db.close_connection()
@cli.command()
@click.argument('start_date', required=True, type=str)
@click.argument('finish_date', required=True, type=str)
@click.argument('optimal_total', required=True, type=int)
@click.option(
'--cpu', default=0, show_default=True,
help='The number of CPU cores that Jesse is allowed to use. If set to 0, it will use as many as is available on your machine.')
@click.option(
'--debug/--no-debug', default=False,
help='Displays detailed logs about the genetic algorithm. Use it if you are interested in the genetic algorithm.'
)
@click.option('--csv/--no-csv', default=False, help='Outputs a CSV file of all DNAs on completion.')
@click.option('--json/--no-json', default=False, help='Outputs a JSON file of all DNAs on completion.')
def optimize(start_date, finish_date, optimal_total, cpu, debug, csv, json):
"""
tunes the hyper-parameters of your strategy
"""
validate_cwd()
from jesse.config import config
config['app']['trading_mode'] = 'optimize'
register_custom_exception_handler()
# debug flag
config['app']['debug_mode'] = debug
from jesse.modes.optimize_mode import optimize_mode
optimize_mode(start_date, finish_date, optimal_total, cpu, csv, json)
@cli.command()
@click.argument('name', required=True, type=str)
def make_strategy(name):
"""
generates a new strategy folder from jesse/strategies/ExampleStrategy
"""
validate_cwd()
from jesse.config import config
config['app']['trading_mode'] = 'make-strategy'
register_custom_exception_handler()
from jesse.services import strategy_maker
strategy_maker.generate(name)
@cli.command()
@click.argument('name', required=True, type=str)
def make_project(name):
"""
generates a new Jesse project
"""
from jesse.config import config
config['app']['trading_mode'] = 'make-project'
register_custom_exception_handler()
from jesse.services import project_maker
project_maker.generate(name)
@cli.command()
@click.option('--dna/--no-dna', default=False,
help='Translates DNA into parameters. Used in optimize mode only')
def routes(dna):
"""
lists all routes
"""
validate_cwd()
from jesse.config import config
config['app']['trading_mode'] = 'routes'
register_custom_exception_handler()
from jesse.modes import routes_mode
routes_mode.run(dna)
if 'plugins' in ls:
@cli.command()
def collect():
"""
fetches streamed market data such as tickers, trades, and orderbook from
the WS connection and stores them into the database for later research.
"""
validate_cwd()
# set trading mode
from jesse.config import config
config['app']['trading_mode'] = 'collect'
register_custom_exception_handler()
from plugins.live.collect_mode import run
run()
@cli.command()
@click.option('--testdrive/--no-testdrive', default=False)
@click.option('--debug/--no-debug', default=False)
@click.option('--dev/--no-dev', default=False)
@click.option('--fee/--no-fee', default=True)
def live(testdrive, debug, dev, fee):
"""
trades in real-time on exchange with REAL money
"""
validate_cwd()
# set trading mode
from jesse.config import config
config['app']['trading_mode'] = 'livetrade'
config['app']['is_test_driving'] = testdrive
register_custom_exception_handler()
# debug flag
config['app']['debug_mode'] = debug
from plugins.live import init
from jesse.services.selectors import get_exchange
# fee flag
if not fee:
for e in config['app']['trading_exchanges']:
config['env']['exchanges'][e]['fee'] = 0
get_exchange(e).fee = 0
# inject live config
init(config)
# execute live session
from plugins.live.live_mode import run
run(dev)
@cli.command()
@click.option('--debug/--no-debug', default=False)
@click.option('--dev/--no-dev', default=False)
@click.option('--fee/--no-fee', default=True)
def paper(debug, dev, fee):
"""
trades in real-time on exchange with PAPER money
"""
validate_cwd()
# set trading mode
from jesse.config import config
config['app']['trading_mode'] = 'papertrade'
register_custom_exception_handler()
# debug flag
config['app']['debug_mode'] = debug
from plugins.live import init
from jesse.services.selectors import get_exchange
# fee flag
if not fee:
for e in config['app']['trading_exchanges']:
config['env']['exchanges'][e]['fee'] = 0
get_exchange(e).fee = 0
# inject live config
init(config)
# execute live session
from plugins.live.live_mode import run
run(dev)
| [
"[email protected]"
] | |
27985cba501ed020a79ad113c0a1a0bdf10f2538 | 28c614942558229bb9adca33070331b04d454015 | /py/schwenk.py | ea995519b24521c1300e79ca9af769b144f98a1e | [] | no_license | qdv/Colorly | 95827b077b888251dea3a2ed58e8a37e98837409 | 6891a2d550a66e374c5da441b452256abccaffad | refs/heads/gh-pages | 2021-05-28T02:57:53.409957 | 2014-11-12T03:00:26 | 2014-11-12T03:00:26 | 100,415,084 | 1 | 0 | null | 2017-08-15T20:05:44 | 2017-08-15T20:05:44 | null | UTF-8 | Python | false | false | 33,254 | py | PALETTE = [
{
"name": "Y1-2-03",
"label": "y1-2-03",
"hex": "#efc84f"
},
{
"name": "Y1-2-04",
"label": "y1-2-04",
"hex": "#fae698"
},
{
"name": "Y1-2-05",
"label": "y1-2-05",
"hex": "#f9e599"
},
{
"name": "Y1-2-06",
"label": "y1-2-06",
"hex": "#fdf6cd"
},
{
"name": "Y1-3-03",
"label": "y1-3-03",
"hex": "#e4bb4a"
},
{
"name": "Y1-3-04",
"label": "y1-3-04",
"hex": "#f2d67d"
},
{
"name": "Y1-3-05",
"label": "y1-3-05",
"hex": "#f3d982"
},
{
"name": "Y1-3-06",
"label": "y1-3-06",
"hex": "#f9e6a1"
},
{
"name": "Y1-3-07",
"label": "y1-3-07",
"hex": "#fff2c0"
},
{
"name": "Y1-4-03",
"label": "y1-4-03",
"hex": "#d6ae52"
},
{
"name": "Y1-4-04",
"label": "y1-4-04",
"hex": "#deb85c"
},
{
"name": "Y1-4-05",
"label": "y1-4-05",
"hex": "#e6c875"
},
{
"name": "Y1-4-06",
"label": "y1-4-06",
"hex": "#ecd78f"
},
{
"name": "Y1-4-07",
"label": "y1-4-07",
"hex": "#f3e1a1"
},
{
"name": "Y1-4-08",
"label": "y1-4-08",
"hex": "#f9eebc"
},
{
"name": "Y1-5-03",
"label": "y1-5-03",
"hex": "#c0a05d"
},
{
"name": "Y1-5-04",
"label": "y1-5-04",
"hex": "#d0b270"
},
{
"name": "Y1-5-05",
"label": "y1-5-05",
"hex": "#d8c085"
},
{
"name": "Y1-5-06",
"label": "y1-5-06",
"hex": "#decb95"
},
{
"name": "Y1-5-07",
"label": "y1-5-07",
"hex": "#e5d6a3"
},
{
"name": "Y1-5-08",
"label": "y1-5-08",
"hex": "#eee2b4"
},
{
"name": "Y1-6-03",
"label": "y1-6-03",
"hex": "#b49965"
},
{
"name": "Y1-6-04",
"label": "y1-6-04",
"hex": "#c0a570"
},
{
"name": "Y1-6-05",
"label": "y1-6-05",
"hex": "#cbb280"
},
{
"name": "Y1-6-06",
"label": "y1-6-06",
"hex": "#d5bf91"
},
{
"name": "Y1-6-07",
"label": "y1-6-07",
"hex": "#e0cfa3"
},
{
"name": "Y1-6-08",
"label": "y1-6-08",
"hex": "#e9ddb6"
},
{
"name": "Y1-6-09",
"label": "y1-6-09",
"hex": "#f2eacb"
},
{
"name": "YG-03",
"label": "yg-03",
"hex": "#827460"
},
{
"name": "YG-04",
"label": "yg-04",
"hex": "#8f816e"
},
{
"name": "YG-05",
"label": "yg-05",
"hex": "#9f927f"
},
{
"name": "YG-06",
"label": "yg-06",
"hex": "#ab9f8c"
},
{
"name": "YG-07",
"label": "yg-07",
"hex": "#bcb09d"
},
{
"name": "YG-08",
"label": "yg-08",
"hex": "#c9bfae"
},
{
"name": "YG-09",
"label": "yg-09",
"hex": "#d9d1c3"
},
{
"name": "YG-10",
"label": "yg-10",
"hex": "#e5dfd3"
},
{
"name": "YG-11",
"label": "yg-11",
"hex": "#f3eee4"
},
{
"name": "Y2-6-03",
"label": "y2-6-03",
"hex": "#9b7a53"
},
{
"name": "Y2-6-04",
"label": "y2-6-04",
"hex": "#a98960"
},
{
"name": "Y2-6-05",
"label": "y2-6-05",
"hex": "#b4956c"
},
{
"name": "Y2-6-06",
"label": "y2-6-06",
"hex": "#bca079"
},
{
"name": "Y2-6-07",
"label": "y2-6-07",
"hex": "#cdb28a"
},
{
"name": "Y2-6-08",
"label": "y2-6-08",
"hex": "#d6bf9a"
},
{
"name": "Y2-6-09",
"label": "y2-6-09",
"hex": "#decba9"
},
{
"name": "Y2-6-10",
"label": "y2-6-10",
"hex": "#e8d7b6"
},
{
"name": "Y2-5-03",
"label": "y2-5-03",
"hex": "#ae8859"
},
{
"name": "Y2-5-04",
"label": "y2-5-04",
"hex": "#bb9667"
},
{
"name": "Y2-5-05",
"label": "y2-5-05",
"hex": "#c4a174"
},
{
"name": "Y2-5-06",
"label": "y2-5-06",
"hex": "#cead81"
},
{
"name": "Y2-5-07",
"label": "y2-5-07",
"hex": "#dabe91"
},
{
"name": "Y2-5-08",
"label": "y2-5-08",
"hex": "#e2caa1"
},
{
"name": "Y2-5-09",
"label": "y2-5-09",
"hex": "#ead6b0"
},
{
"name": "Y2-5-10",
"label": "y2-5-10",
"hex": "#f4e3c2"
},
{
"name": "Y2-4-03",
"label": "y2-4-03",
"hex": "#c29150"
},
{
"name": "Y2-4-04",
"label": "y2-4-04",
"hex": "#cca064"
},
{
"name": "Y2-4-05",
"label": "y2-4-05",
"hex": "#d7ae75"
},
{
"name": "Y2-4-06",
"label": "y2-4-06",
"hex": "#e1bc84"
},
{
"name": "Y2-4-07",
"label": "y2-4-07",
"hex": "#e8c995"
},
{
"name": "Y2-4-08",
"label": "y2-4-08",
"hex": "#f0d7a8"
},
{
"name": "Y2-4-09",
"label": "y2-4-09",
"hex": "#f8e3bb"
},
{
"name": "Y2-3-03",
"label": "y2-3-03",
"hex": "#d69c42"
},
{
"name": "Y2-3-04",
"label": "y2-3-04",
"hex": "#e0ac53"
},
{
"name": "Y2-3-05",
"label": "y2-3-05",
"hex": "#eaba6a"
},
{
"name": "Y2-3-06",
"label": "y2-3-06",
"hex": "#f0cc87"
},
{
"name": "Y2-3-07",
"label": "y2-3-07",
"hex": "#f6dda8"
},
{
"name": "Y2-3-08",
"label": "y2-3-08",
"hex": "#fbe9c0"
},
{
"name": "Y2-2-03",
"label": "y2-2-03",
"hex": "#f2af3b"
},
{
"name": "Y2-2-04",
"label": "y2-2-04",
"hex": "#f6c056"
},
{
"name": "Y2-2-05",
"label": "y2-2-05",
"hex": "#fad279"
},
{
"name": "Y2-2-06",
"label": "y2-2-06",
"hex": "#fee6a6"
},
{
"name": "Y2-2-07",
"label": "y2-2-07",
"hex": "#fff6d6"
},
{
"name": "01-2-03",
"label": "01-2-03",
"hex": "#e09545"
},
{
"name": "01-2-04",
"label": "01-2-04",
"hex": "#e9a553"
},
{
"name": "01-2-05",
"label": "01-2-05",
"hex": "#f2ba6a"
},
{
"name": "01-2-06",
"label": "01-2-06",
"hex": "#fbd08b"
},
{
"name": "01-2-07",
"label": "01-2-07",
"hex": "#fbdfaa"
},
{
"name": "01-2-08",
"label": "01-2-08",
"hex": "#fff1d2"
},
{
"name": "01-3-03",
"label": "01-3-03",
"hex": "#c88846"
},
{
"name": "01-3-04",
"label": "01-3-04",
"hex": "#d49653"
},
{
"name": "01-3-05",
"label": "01-3-05",
"hex": "#dba768"
},
{
"name": "01-3-06",
"label": "01-3-06",
"hex": "#e7b87d"
},
{
"name": "01-3-07",
"label": "01-3-07",
"hex": "#efca95"
},
{
"name": "01-3-08",
"label": "01-3-08",
"hex": "#f5d9ac"
},
{
"name": "01-3-09",
"label": "01-3-09",
"hex": "#fde9c4"
},
{
"name": "01-4-03",
"label": "01-4-03",
"hex": "#b07947"
},
{
"name": "01-4-04",
"label": "01-4-04",
"hex": "#c08a55"
},
{
"name": "01-4-05",
"label": "01-4-05",
"hex": "#cf9d67"
},
{
"name": "01-4-06",
"label": "01-4-06",
"hex": "#d7ab76"
},
{
"name": "01-4-07",
"label": "01-4-07",
"hex": "#e3bd8b"
},
{
"name": "01-4-08",
"label": "01-4-08",
"hex": "#edce9f"
},
{
"name": "01-4-09",
"label": "01-4-09",
"hex": "#f2dbb3"
},
{
"name": "01-4-10",
"label": "01-4-10",
"hex": "#f7e8ca"
},
{
"name": "01-5-03",
"label": "01-5-03",
"hex": "#a2764d"
},
{
"name": "01-5-04",
"label": "01-5-04",
"hex": "#b08358"
},
{
"name": "01-5-05",
"label": "01-5-05",
"hex": "#bc9367"
},
{
"name": "01-5-06",
"label": "01-5-06",
"hex": "#c7a175"
},
{
"name": "01-5-07",
"label": "01-5-07",
"hex": "#d3af83"
},
{
"name": "01-5-08",
"label": "01-5-08",
"hex": "#dfc095"
},
{
"name": "01-5-09",
"label": "01-5-09",
"hex": "#e9cea7"
},
{
"name": "01-5-10",
"label": "01-5-10",
"hex": "#f0dcbb"
},
{
"name": "01-6-03",
"label": "01-6-03",
"hex": "#94704e"
},
{
"name": "01-6-04",
"label": "01-6-04",
"hex": "#9e7b59"
},
{
"name": "01-6-05",
"label": "01-6-05",
"hex": "#ac8966"
},
{
"name": "01-6-06",
"label": "01-6-06",
"hex": "#b79570"
},
{
"name": "01-6-07",
"label": "01-6-07",
"hex": "#c6a581"
},
{
"name": "01-6-08",
"label": "01-6-08",
"hex": "#d3b895"
},
{
"name": "01-6-09",
"label": "01-6-09",
"hex": "#ddc6a5"
},
{
"name": "01-6-10",
"label": "01-6-10",
"hex": "#e7d5b8"
},
{
"name": "01-6-11",
"label": "01-6-11",
"hex": "#eee0c8"
},
{
"name": "OG-03",
"label": "og-03",
"hex": "#7c6f62"
},
{
"name": "OG-04",
"label": "og-04",
"hex": "#8b7e72"
},
{
"name": "OG-05",
"label": "og-05",
"hex": "#998c80"
},
{
"name": "OG-06",
"label": "og-06",
"hex": "#a79a8f"
},
{
"name": "OG-07",
"label": "og-07",
"hex": "#b8aca1"
},
{
"name": "OG-08",
"label": "og-08",
"hex": "#c6bcb1"
},
{
"name": "OG-09",
"label": "og-09",
"hex": "#d5cdc4"
},
{
"name": "OG-10",
"label": "og-10",
"hex": "#e0dad2"
},
{
"name": "OG-11",
"label": "og-11",
"hex": "#ece8e1"
},
{
"name": "O2-6-03",
"label": "o2-6-03",
"hex": "#886751"
},
{
"name": "O2-6-04",
"label": "o2-6-04",
"hex": "#97765e"
},
{
"name": "O2-6-05",
"label": "o2-6-05",
"hex": "#a5846b"
},
{
"name": "O2-6-06",
"label": "o2-6-06",
"hex": "#b49479"
},
{
"name": "O2-6-07",
"label": "o2-6-07",
"hex": "#bfa087"
},
{
"name": "O2-6-08",
"label": "o2-6-08",
"hex": "#cbb098"
},
{
"name": "O2-6-09",
"label": "o2-6-09",
"hex": "#d7bfa7"
},
{
"name": "O2-6-10",
"label": "o2-6-10",
"hex": "#dfcab3"
},
{
"name": "O2-6-11",
"label": "o2-6-11",
"hex": "#e9d8c3"
},
{
"name": "O2-6-12",
"label": "o2-6-12",
"hex": "#f0e5d4"
},
{
"name": "O2-5-03",
"label": "o2-5-03",
"hex": "#9c7052"
},
{
"name": "O2-5-04",
"label": "o2-5-04",
"hex": "#ae8160"
},
{
"name": "O2-5-05",
"label": "o2-5-05",
"hex": "#bb8f6d"
},
{
"name": "O2-5-06",
"label": "o2-5-06",
"hex": "#c89f7d"
},
{
"name": "O2-5-07",
"label": "o2-5-07",
"hex": "#d2ad8b"
},
{
"name": "O2-5-08",
"label": "o2-5-08",
"hex": "#dcbb9b"
},
{
"name": "O2-5-09",
"label": "o2-5-09",
"hex": "#e5c9ab"
},
{
"name": "O2-5-10",
"label": "o2-5-10",
"hex": "#edd6bc"
},
{
"name": "O2-5-11",
"label": "o2-5-11",
"hex": "#f7e7d2"
},
{
"name": "O2-4-03",
"label": "o2-4-03",
"hex": "#b07952"
},
{
"name": "O2-4-04",
"label": "o2-4-04",
"hex": "#bf8860"
},
{
"name": "O2-4-05",
"label": "o2-4-05",
"hex": "#ca976e"
},
{
"name": "O2-4-06",
"label": "o2-4-06",
"hex": "#d2a47c"
},
{
"name": "O2-4-07",
"label": "o2-4-07",
"hex": "#deb48d"
},
{
"name": "O2-4-08",
"label": "o2-4-08",
"hex": "#e6c29d"
},
{
"name": "O2-4-09",
"label": "o2-4-09",
"hex": "#edd1af"
},
{
"name": "O2-4-10",
"label": "o2-4-10",
"hex": "#f4debf"
},
{
"name": "O2-3-03",
"label": "o2-3-03",
"hex": "#c57d4f"
},
{
"name": "O2-3-04",
"label": "o2-3-04",
"hex": "#d08c5c"
},
{
"name": "O2-3-05",
"label": "o2-3-05",
"hex": "#dc9c6c"
},
{
"name": "O2-3-06",
"label": "o2-3-06",
"hex": "#e7af7e"
},
{
"name": "O2-3-07",
"label": "o2-3-07",
"hex": "#efc293"
},
{
"name": "O2-3-08",
"label": "o2-3-08",
"hex": "#f4d0a5"
},
{
"name": "O2-3-09",
"label": "o2-3-09",
"hex": "#f9ddb9"
},
{
"name": "O2-2-03",
"label": "o2-2-03",
"hex": "#e47e41"
},
{
"name": "O2-2-04",
"label": "o2-2-04",
"hex": "#ec9253"
},
{
"name": "O2-2-05",
"label": "o2-2-05",
"hex": "#f8a96b"
},
{
"name": "O2-2-06",
"label": "o2-2-06",
"hex": "#fabc81"
},
{
"name": "O2-2-07",
"label": "o2-2-07",
"hex": "#fdd1a2"
},
{
"name": "O2-2-08",
"label": "o2-2-08",
"hex": "#ffe4c1"
},
{
"name": "R1-2-02",
"label": "r1-2-02",
"hex": "#c46547"
},
{
"name": "R1-2-03",
"label": "r1-2-03",
"hex": "#d06e4e"
},
{
"name": "R1-2-04",
"label": "r1-2-04",
"hex": "#e1845f"
},
{
"name": "R1-2-05",
"label": "r1-2-05",
"hex": "#e9926c"
},
{
"name": "R1-2-06",
"label": "r1-2-06",
"hex": "#f2a580"
},
{
"name": "R1-2-07",
"label": "r1-2-07",
"hex": "#f9b896"
},
{
"name": "R1-2-08",
"label": "r1-2-08",
"hex": "#fecaaa"
},
{
"name": "R1-3-02",
"label": "r1-3-02",
"hex": "#ad6248"
},
{
"name": "R1-3-03",
"label": "r1-3-03",
"hex": "#bd7155"
},
{
"name": "R1-3-04",
"label": "r1-3-04",
"hex": "#cd8363"
},
{
"name": "R1-3-05",
"label": "r1-3-05",
"hex": "#d6906f"
},
{
"name": "R1-3-06",
"label": "r1-3-06",
"hex": "#dfa180"
},
{
"name": "R1-3-07",
"label": "r1-3-07",
"hex": "#e9b495"
},
{
"name": "R1-3-08",
"label": "r1-3-08",
"hex": "#f1c4a8"
},
{
"name": "R1-3-09",
"label": "r1-3-09",
"hex": "#f7d2b9"
},
{
"name": "R1-3-10",
"label": "r1-3-10",
"hex": "#fddfca"
},
{
"name": "R1-4-03",
"label": "r1-4-03",
"hex": "#a67156"
},
{
"name": "R1-4-04",
"label": "r1-4-04",
"hex": "#b58064"
},
{
"name": "R1-4-05",
"label": "r1-4-05",
"hex": "#c29073"
},
{
"name": "R1-4-06",
"label": "r1-4-06",
"hex": "#cc9e81"
},
{
"name": "R1-4-07",
"label": "r1-4-07",
"hex": "#d8ad91"
},
{
"name": "R1-4-08",
"label": "r1-4-08",
"hex": "#e1ba9f"
},
{
"name": "R1-4-09",
"label": "r1-4-09",
"hex": "#eccbb3"
},
{
"name": "R1-4-10",
"label": "r1-4-10",
"hex": "#f5d8c3"
},
{
"name": "R1-5-03",
"label": "r1-5-03",
"hex": "#926b57"
},
{
"name": "R1-5-04",
"label": "r1-5-04",
"hex": "#a27b66"
},
{
"name": "R1-5-05",
"label": "r1-5-05",
"hex": "#b18a75"
},
{
"name": "R1-5-06",
"label": "r1-5-06",
"hex": "#be9a84"
},
{
"name": "R1-5-07",
"label": "r1-5-07",
"hex": "#cbaa95"
},
{
"name": "R1-5-08",
"label": "r1-5-08",
"hex": "#d6b7a4"
},
{
"name": "R1-5-09",
"label": "r1-5-09",
"hex": "#ddc4b4"
},
{
"name": "R1-5-10",
"label": "r1-5-10",
"hex": "#e8d4c6"
},
{
"name": "R1-5-11",
"label": "r1-5-11",
"hex": "#f2e4da"
},
{
"name": "RG-03",
"label": "rg-03",
"hex": "#796a64"
},
{
"name": "RG-04",
"label": "rg-04",
"hex": "#897a73"
},
{
"name": "RG-05",
"label": "rg-05",
"hex": "#978881"
},
{
"name": "RG-06",
"label": "rg-06",
"hex": "#a89992"
},
{
"name": "RG-07",
"label": "rg-07",
"hex": "#b7a9a2"
},
{
"name": "RG-08",
"label": "rg-08",
"hex": "#c4b8b1"
},
{
"name": "RG-09",
"label": "rg-09",
"hex": "#d2c8c2"
},
{
"name": "RG-10",
"label": "rg-10",
"hex": "#ded8d2"
},
{
"name": "RG-11",
"label": "rg-11",
"hex": "#ede8e3"
},
{
"name": "R2-4-03",
"label": "r2-4-03",
"hex": "#947066"
},
{
"name": "R2-4-04",
"label": "r2-4-04",
"hex": "#a27e74"
},
{
"name": "R2-4-05",
"label": "r2-4-05",
"hex": "#af8b82"
},
{
"name": "R2-4-06",
"label": "r2-4-06",
"hex": "#bd9a91"
},
{
"name": "R2-4-07",
"label": "r2-4-07",
"hex": "#cba9a1"
},
{
"name": "R2-4-08",
"label": "r2-4-08",
"hex": "#d4b5ac"
},
{
"name": "R2-4-09",
"label": "r2-4-09",
"hex": "#dec4bc"
},
{
"name": "R2-4-10",
"label": "r2-4-10",
"hex": "#e9d7d0"
},
{
"name": "R2-3-03",
"label": "r2-3-03",
"hex": "#a87064"
},
{
"name": "R2-3-04",
"label": "r2-3-04",
"hex": "#b58174"
},
{
"name": "R2-3-05",
"label": "r2-3-05",
"hex": "#c19083"
},
{
"name": "R2-3-06",
"label": "r2-3-06",
"hex": "#cd9f90"
},
{
"name": "R2-3-07",
"label": "r2-3-07",
"hex": "#d7aea0"
},
{
"name": "R2-3-08",
"label": "r2-3-08",
"hex": "#dfbeb1"
},
{
"name": "R2-3-09",
"label": "r2-3-09",
"hex": "#e9cdc2"
},
{
"name": "R2-3-10",
"label": "r2-3-10",
"hex": "#efd9d0"
},
{
"name": "R2-2-02",
"label": "r2-2-02",
"hex": "#b56051"
},
{
"name": "R2-2-03",
"label": "r2-2-03",
"hex": "#c16e5e"
},
{
"name": "R2-2-04",
"label": "r2-2-04",
"hex": "#d07e6b"
},
{
"name": "R2-2-05",
"label": "r2-2-05",
"hex": "#da8e7b"
},
{
"name": "R2-2-06",
"label": "r2-2-06",
"hex": "#e39e89"
},
{
"name": "R2-2-07",
"label": "r2-2-07",
"hex": "#eab09b"
},
{
"name": "R2-2-08",
"label": "r2-2-08",
"hex": "#f1c1ae"
},
{
"name": "R2-1-02",
"label": "r2-1-02",
"hex": "#c84f3e"
},
{
"name": "R2-1-03",
"label": "r2-1-03",
"hex": "#db604c"
},
{
"name": "R2-1-04",
"label": "r2-1-04",
"hex": "#e5705a"
},
{
"name": "R2-1-05",
"label": "r2-1-05",
"hex": "#ef856e"
},
{
"name": "R2-1-06",
"label": "r2-1-06",
"hex": "#f59981"
},
{
"name": "R2-1-07",
"label": "r2-1-07",
"hex": "#f9ad95"
},
{
"name": "B1-2-02",
"label": "b1-2-02",
"hex": "#819ad6"
},
{
"name": "B1-2-03",
"label": "b1-2-03",
"hex": "#8fa8df"
},
{
"name": "B1-2-04",
"label": "b1-2-04",
"hex": "#9bb3e2"
},
{
"name": "B1-2-05",
"label": "b1-2-05",
"hex": "#acc1e7"
},
{
"name": "B1-2-06",
"label": "b1-2-06",
"hex": "#bdcfeb"
},
{
"name": "B1-2-07",
"label": "b1-2-07",
"hex": "#cddbee"
},
{
"name": "B1-3-02",
"label": "b1-3-02",
"hex": "#8095ba"
},
{
"name": "B1-3-03",
"label": "b1-3-03",
"hex": "#8ca0c2"
},
{
"name": "B1-3-04",
"label": "b1-3-04",
"hex": "#9daecc"
},
{
"name": "B1-3-05",
"label": "b1-3-05",
"hex": "#aebdd8"
},
{
"name": "B1-3-06",
"label": "b1-3-06",
"hex": "#bdcae0"
},
{
"name": "B1-3-07",
"label": "b1-3-07",
"hex": "#ced8e8"
},
{
"name": "B1-3-08",
"label": "b1-3-08",
"hex": "#dee6f0"
},
{
"name": "B1-3-09",
"label": "b1-3-09",
"hex": "#e8eef0"
},
{
"name": "B1-4-03",
"label": "b1-4-03",
"hex": "#8795b1"
},
{
"name": "B1-4-04",
"label": "b1-4-04",
"hex": "#94a2bc"
},
{
"name": "B1-4-05",
"label": "b1-4-05",
"hex": "#a7b3ca"
},
{
"name": "B1-4-06",
"label": "b1-4-06",
"hex": "#b7c1d5"
},
{
"name": "B1-4-07",
"label": "b1-4-07",
"hex": "#c5cedf"
},
{
"name": "B1-4-08",
"label": "b1-4-08",
"hex": "#d4ddea"
},
{
"name": "B1-4-09",
"label": "b1-4-09",
"hex": "#e1e9f1"
},
{
"name": "B1-5-03",
"label": "b1-5-03",
"hex": "#838998"
},
{
"name": "B1-5-04",
"label": "b1-5-04",
"hex": "#9095a3"
},
{
"name": "B1-5-05",
"label": "b1-5-05",
"hex": "#a0a5b2"
},
{
"name": "B1-5-06",
"label": "b1-5-06",
"hex": "#aeb2bc"
},
{
"name": "B1-5-07",
"label": "b1-5-07",
"hex": "#bdc1c9"
},
{
"name": "B1-5-08",
"label": "b1-5-08",
"hex": "#cccfd5"
},
{
"name": "B1-5-09",
"label": "b1-5-09",
"hex": "#dce0e4"
},
{
"name": "B1-5-10",
"label": "b1-5-10",
"hex": "#e7eaec"
},
{
"name": "BG-03",
"label": "bg-03",
"hex": "#75787a"
},
{
"name": "BG-04",
"label": "bg-04",
"hex": "#868a8c"
},
{
"name": "BG-05",
"label": "bg-05",
"hex": "#96989a"
},
{
"name": "BG-06",
"label": "bg-06",
"hex": "#a3a5a7"
},
{
"name": "BG-07",
"label": "bg-07",
"hex": "#b2b4b5"
},
{
"name": "BG-08",
"label": "bg-08",
"hex": "#c2c3c4"
},
{
"name": "BG-09",
"label": "bg-09",
"hex": "#d1d1d0"
},
{
"name": "BG-10",
"label": "bg-10",
"hex": "#dfe0df"
},
{
"name": "BG-11",
"label": "bg-11",
"hex": "#87929a"
},
{
"name": "B2-5-03",
"label": "b2-5-03",
"hex": "#919ba1"
},
{
"name": "B2-5-04",
"label": "b2-5-04",
"hex": "#a2abb0"
},
{
"name": "B2-5-05",
"label": "b2-5-05",
"hex": "#b4bcc0"
},
{
"name": "B2-5-06",
"label": "b2-5-06",
"hex": "#c8ced1"
},
{
"name": "B2-5-07",
"label": "b2-5-07",
"hex": "#d8dcdd"
},
{
"name": "B2-5-08",
"label": "b2-5-08",
"hex": "#e6e8e7"
},
{
"name": "B2-5-09",
"label": "b2-5-09",
"hex": "#e6e8e7"
},
{
"name": "B2-4-03",
"label": "b2-4-03",
"hex": "#8ca4b5"
},
{
"name": "B2-4-04",
"label": "b2-4-04",
"hex": "#9ab1c1"
},
{
"name": "B2-4-05",
"label": "b2-4-05",
"hex": "#acc0cd"
},
{
"name": "B2-4-06",
"label": "b2-4-06",
"hex": "#bdced7"
},
{
"name": "B2-4-07",
"label": "b2-4-07",
"hex": "#cddbe1"
},
{
"name": "B2-4-08",
"label": "b2-4-08",
"hex": "#dce7ea"
},
{
"name": "B2-4-09",
"label": "b2-4-09",
"hex": "#eef3f1"
},
{
"name": "B2-3-02",
"label": "b2-3-02",
"hex": "#7aa7c1"
},
{
"name": "B2-3-03",
"label": "b2-3-03",
"hex": "#8ab6d0"
},
{
"name": "B2-3-04",
"label": "b2-3-04",
"hex": "#9dc2d9"
},
{
"name": "B2-3-05",
"label": "b2-3-05",
"hex": "#b0d0e2"
},
{
"name": "B2-3-06",
"label": "b2-3-06",
"hex": "#c1dae8"
},
{
"name": "B2-3-07",
"label": "b2-3-07",
"hex": "#d3e6ef"
},
{
"name": "B2-3-08",
"label": "b2-3-08",
"hex": "#e5f1f4"
},
{
"name": "B2-2-02",
"label": "b2-2-02",
"hex": "#75b7d4"
},
{
"name": "B2-2-03",
"label": "b2-2-03",
"hex": "#8bc3dd"
},
{
"name": "B2-2-04",
"label": "b2-2-04",
"hex": "#a4d1e6"
},
{
"name": "B2-2-05",
"label": "b2-2-05",
"hex": "#bbdeef"
},
{
"name": "B2-2-06",
"label": "b2-2-06",
"hex": "#cfe9f4"
},
{
"name": "G1-2-02",
"label": "g1-2-02",
"hex": "#86ba5c"
},
{
"name": "G1-2-03",
"label": "g1-2-03",
"hex": "#97c66d"
},
{
"name": "G1-2-04",
"label": "g1-2-04",
"hex": "#a9d27f"
},
{
"name": "G1-2-05",
"label": "g1-2-05",
"hex": "#c0df97"
},
{
"name": "G1-2-06",
"label": "g1-2-06",
"hex": "#d2e9ae"
},
{
"name": "G1-2-07",
"label": "g1-2-07",
"hex": "#e9f6ca"
},
{
"name": "G1-3-02",
"label": "g1-3-02",
"hex": "#93ab74"
},
{
"name": "G1-3-03",
"label": "g1-3-03",
"hex": "#a2b781"
},
{
"name": "G1-3-04",
"label": "g1-3-04",
"hex": "#b4c595"
},
{
"name": "G1-3-05",
"label": "g1-3-05",
"hex": "#c4d3a7"
},
{
"name": "G1-3-06",
"label": "g1-3-06",
"hex": "#d3dfbb"
},
{
"name": "G1-3-07",
"label": "g1-3-07",
"hex": "#e1eace"
},
{
"name": "G1-3-08",
"label": "g1-3-08",
"hex": "#eff4dd"
},
{
"name": "G1-4-03",
"label": "g1-4-03",
"hex": "#9da886"
},
{
"name": "G1-4-04",
"label": "g1-4-04",
"hex": "#abb595"
},
{
"name": "G1-4-05",
"label": "g1-4-05",
"hex": "#bac2a6"
},
{
"name": "G1-4-06",
"label": "g1-4-06",
"hex": "#c6ceb4"
},
{
"name": "G1-4-07",
"label": "g1-4-07",
"hex": "#d4dbc3"
},
{
"name": "G1-4-08",
"label": "g1-4-08",
"hex": "#e2e7d5"
},
{
"name": "G1-5-03",
"label": "g1-5-03",
"hex": "#90987e"
},
{
"name": "G1-5-04",
"label": "g1-5-04",
"hex": "#9fa68d"
},
{
"name": "G1-5-05",
"label": "g1-5-05",
"hex": "#afb69e"
},
{
"name": "G1-5-06",
"label": "g1-5-06",
"hex": "#bec3ac"
},
{
"name": "G1-5-07",
"label": "g1-5-07",
"hex": "#cad0bb"
},
{
"name": "G1-5-08",
"label": "g1-5-08",
"hex": "#d6dcc9"
},
{
"name": "G1-5-09",
"label": "g1-5-09",
"hex": "#e5e9d8"
},
{
"name": "GG-03",
"label": "gg-03",
"hex": "#868b78"
},
{
"name": "GG-04",
"label": "gg-04",
"hex": "#959986"
},
{
"name": "GG-05",
"label": "gg-05",
"hex": "#a4a794"
},
{
"name": "GG-06",
"label": "gg-06",
"hex": "#b2b5a3"
},
{
"name": "GG-07",
"label": "gg-07",
"hex": "#c3c5b3"
},
{
"name": "GG-08",
"label": "gg-08",
"hex": "#cfd1c1"
},
{
"name": "GG-09",
"label": "gg-09",
"hex": "#dbddd0"
},
{
"name": "GG-10",
"label": "gg-10",
"hex": "#e7e8de"
},
{
"name": "G2-5-03",
"label": "g2-5-03",
"hex": "#a1a77a"
},
{
"name": "G2-5-04",
"label": "g2-5-04",
"hex": "#afb388"
},
{
"name": "G2-5-05",
"label": "g2-5-05",
"hex": "#bdc196"
},
{
"name": "G2-5-06",
"label": "g2-5-06",
"hex": "#cacda4"
},
{
"name": "G2-5-07",
"label": "g2-5-07",
"hex": "#d7dab4"
},
{
"name": "G2-5-08",
"label": "g2-5-08",
"hex": "#e2e5c5"
},
{
"name": "G2-4-03",
"label": "g2-4-03",
"hex": "#adb47c"
},
{
"name": "G2-4-04",
"label": "g2-4-04",
"hex": "#bcc28a"
},
{
"name": "G2-4-05",
"label": "g2-4-05",
"hex": "#c9ce98"
},
{
"name": "G2-4-06",
"label": "g2-4-06",
"hex": "#d5daa7"
},
{
"name": "G2-4-07",
"label": "g2-4-07",
"hex": "#e1e5b9"
},
{
"name": "G2-4-08",
"label": "g2-4-08",
"hex": "#eef0ce"
},
{
"name": "G2-3-02",
"label": "g2-3-02",
"hex": "#b2ba64"
},
{
"name": "G2-3-03",
"label": "g2-3-03",
"hex": "#bdc572"
},
{
"name": "G2-3-04",
"label": "g2-3-04",
"hex": "#cbd188"
},
{
"name": "G2-3-05",
"label": "g2-3-05",
"hex": "#dadf9e"
},
{
"name": "G2-3-06",
"label": "g2-3-06",
"hex": "#e6eab2"
},
{
"name": "G2-3-07",
"label": "g2-3-07",
"hex": "#f1f4c8"
},
{
"name": "G2-2-02",
"label": "g2-2-02",
"hex": "#b2c74b"
},
{
"name": "G2-2-03",
"label": "g2-2-03",
"hex": "#c1d35f"
},
{
"name": "G2-2-04",
"label": "g2-2-04",
"hex": "#d0df77"
},
{
"name": "G2-2-05",
"label": "g2-2-05",
"hex": "#dfeb95"
},
{
"name": "G2-2-06",
"label": "g2-2-06",
"hex": "#eff8ba"
},
{
"name": "G-03",
"label": "g-03",
"hex": "#706e6b"
},
{
"name": "G-04",
"label": "g-04",
"hex": "#7e7b78"
},
{
"name": "G-05",
"label": "g-05",
"hex": "#8d8986"
},
{
"name": "G-06",
"label": "g-06",
"hex": "#9d9995"
},
{
"name": "G-07",
"label": "g-07",
"hex": "#adaaa5"
},
{
"name": "G-08",
"label": "g-08",
"hex": "#bbb8b4"
},
{
"name": "G-09",
"label": "g-09",
"hex": "#cbc9c4"
},
{
"name": "G-10",
"label": "g-10",
"hex": "#dbd8d3"
},
{
"name": "G-11",
"label": "g-11",
"hex": "#e7e6e1"
},
{
"name": "G-12",
"label": "g-12",
"hex": "#f6f5ef"
}
] | [
"[email protected]"
] | |
63e86a6b7e20c8757cc548bbcc34e73f7ec7a7b5 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_36671.py | da11b4df30e8b638e72b0392d485f68d63020e13 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,845 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((562.956, 606.319, 542.196), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((596.235, 664.043, 548.709), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((641.842, 732.003, 543.741), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((592.522, 674.052, 426.156), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((759.202, 885.626, 574.56), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((595.975, 638.826, 544.225), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((596.066, 637.629, 544.109), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((622.453, 631.705, 536.044), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((610.124, 606.546, 539.722), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((618.62, 580.563, 545.784), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((637.632, 563.112, 534.592), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((661.896, 574.724, 526.909), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((572.778, 627.147, 555.985), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((752.186, 523.515, 505.736), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((812.378, 708.375, 561.014), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((812.378, 708.375, 561.014), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((787.044, 698.266, 566.914), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((761.984, 686.658, 572.535), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((737.326, 672.387, 575.039), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((709.97, 664.345, 575.127), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((681.501, 666.064, 574.934), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((653.269, 665.568, 570.291), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((881.64, 606.623, 464.7), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((417.405, 721.536, 663.307), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((650.76, 705.733, 581.794), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((650.76, 705.733, 581.794), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((657.307, 703.601, 553.465), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((652.873, 707.718, 524.86), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((633.293, 724.945, 511.461), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((547.772, 636.033, 494.28), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((711.716, 821.821, 525.234), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((585.639, 660.6, 523.058), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((585.423, 660.663, 522.616), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((582.238, 645.56, 498.963), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((592.751, 622.413, 511.243), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((616.031, 607.718, 505.133), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((639.119, 605.282, 521.518), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((653.087, 601.568, 545.752), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((659.536, 578.117, 559.963), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((625.652, 636.703, 612.493), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((692.507, 519.993, 503.511), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((601.128, 675.644, 608.623), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((601.889, 683.222, 583.583), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((605.695, 701.553, 529.548), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((610.247, 720.416, 475.294), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((533.719, 694.295, 470.952), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((672.807, 765.909, 404.604), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((518.526, 660.434, 515.97), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((541.481, 676.562, 522.098), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((566.409, 690.632, 528.682), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((576.472, 717.073, 537.64), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((594.393, 739.643, 546.438), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((610.532, 763.242, 555.039), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((590.523, 686.806, 560.614), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((632.939, 843.689, 553.686), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
] | |
995436ff7e70706bbea6b85818dcb895c3ff70c7 | 4cfbc12903651dedbc799f53a8078433196e7919 | /Pre Processing/Topic 1 - Numpy/Class 1 - IntroductionProgram.py | c3b196507a0ce2566d2dcc1315350a927e618dbd | [] | no_license | gautam4941/Machine_Learning_Codes | 78bf86ab3caf6ee329c88ff18d25927125627a2c | 0009d12ca207a9b0431ea56decc293588eb447b1 | refs/heads/main | 2023-02-06T18:05:44.154641 | 2023-01-30T17:04:25 | 2023-01-30T17:04:25 | 353,594,523 | 0 | 5 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | #How to create numpy array
#Mutability and Immutability
import numpy as np
l = [ [5, 6, 9, 10, 11], [ 1, 2, 3, 5, 6 ] ]
print( l, type(l), len(l) )
arr = np.array( l )
print( f"arr = { arr }", type( arr ), len( arr ), arr.shape )
print()
print( "Printing Numpy Array in the loop" )
for i in arr:
print( f"i : { i }" )
for j in i:
print( f" j : {j}" )
print()
arr[1] = 9
print( "Checking Mutability, " )
print( f"arr = { arr }" )
arr[1][3] = 5
print( f"arr = { arr }" )
print() | [
"[email protected]"
] | |
a8dc43d429450527feb94790ee8b87c4ec620417 | 209c876b1e248fd67bd156a137d961a6610f93c7 | /python/paddle/fluid/tests/unittests/dygraph_to_static/test_slice.py | 96bcc6d016e0c78a32c9b6424844cd81ed8f6f88 | [
"Apache-2.0"
] | permissive | Qengineering/Paddle | 36e0dba37d29146ebef4fba869490ecedbf4294e | 591456c69b76ee96d04b7d15dca6bb8080301f21 | refs/heads/develop | 2023-01-24T12:40:04.551345 | 2022-10-06T10:30:56 | 2022-10-06T10:30:56 | 544,837,444 | 0 | 0 | Apache-2.0 | 2022-10-03T10:12:54 | 2022-10-03T10:12:54 | null | UTF-8 | Python | false | false | 8,857 | py | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import unittest
import numpy as np
import paddle
from paddle.static import InputSpec
SEED = 2020
np.random.seed(SEED)
prog_trans = paddle.jit.ProgramTranslator()
@paddle.jit.to_static
def test_slice_without_control_flow(x):
# Python slice will not be transformed.
x = paddle.to_tensor(x)
a = [x]
a[0] = paddle.full(shape=[2], fill_value=2, dtype="float32")
return a[0]
@paddle.jit.to_static
def test_slice_in_if(x):
x = paddle.to_tensor(x)
a = []
if x.numpy()[0] > 0:
a.append(x)
else:
a.append(paddle.full(shape=[1, 2], fill_value=9, dtype="int32"))
if x.numpy()[0] > 0:
a[0] = x
a[0] = x + 1
out = a[0]
return out
@paddle.jit.to_static
def test_slice_in_while_loop(x, iter_num=3):
x = paddle.to_tensor(x)
iter_num_var = paddle.full(shape=[1], fill_value=iter_num, dtype="int32")
a = []
i = 0
while i < iter_num_var:
a.append(x)
i += 1
i = 0
while i < iter_num_var.numpy()[0]:
a[i] = paddle.full(shape=[2], fill_value=2, dtype="float32")
i += 1
out = a[0:iter_num]
return out[0]
@paddle.jit.to_static
def test_slice_in_for_loop(x, iter_num=3):
x = paddle.to_tensor(x)
a = []
# Use `paddle.full` so that static analysis can analyze the type of iter_num is Tensor
iter_num = paddle.full(
shape=[1], fill_value=iter_num, dtype="int32"
) # TODO(liym27): Delete it if the type of parameter iter_num can be resolved
for i in range(iter_num):
a.append(x)
for i in range(iter_num):
a[i] = x
out = a[2]
return out
@paddle.jit.to_static
def test_set_value(x):
x = paddle.to_tensor(x)
x[0] = paddle.full(shape=[1], fill_value=2, dtype="float32")
x[1:2, 0:1] = 10
return x
class LayerWithSetValue(paddle.nn.Layer):
def __init__(self, input_dim, hidden):
super(LayerWithSetValue, self).__init__()
self.linear = paddle.nn.Linear(input_dim, hidden)
@paddle.jit.to_static
def forward(self, x):
x = self.linear(x)
x[0] = 1
return x
class TestSliceWithoutControlFlow(unittest.TestCase):
def setUp(self):
self.init_input()
self.place = paddle.CUDAPlace(
0) if paddle.is_compiled_with_cuda() else paddle.CPUPlace()
self.init_dygraph_func()
paddle.disable_static()
def init_input(self):
self.input = np.random.random((3)).astype('int32')
def init_dygraph_func(self):
self.dygraph_func = test_slice_without_control_flow
def run_dygraph_mode(self):
return self._run(to_static=False)
def _run(self, to_static):
prog_trans.enable(to_static)
res = self.dygraph_func(self.input)
return res.numpy()
def run_static_mode(self):
return self._run(to_static=True)
def test_transformed_static_result(self):
static_res = self.run_static_mode()
dygraph_res = self.run_dygraph_mode()
np.testing.assert_allclose(dygraph_res, static_res, rtol=1e-05)
class TestSliceInIf(TestSliceWithoutControlFlow):
def init_dygraph_func(self):
self.dygraph_func = test_slice_in_if
class TestSliceInWhileLoop(TestSliceWithoutControlFlow):
def init_dygraph_func(self):
self.dygraph_func = test_slice_in_while_loop
class TestSliceInForLoop(TestSliceWithoutControlFlow):
def init_dygraph_func(self):
self.dygraph_func = test_slice_in_for_loop
class TestSetValue(TestSliceWithoutControlFlow):
def init_input(self):
self.input = np.full([3, 4, 5], 5).astype('float32')
def init_dygraph_func(self):
self.dygraph_func = test_set_value
class TestSetValueWithLayerAndSave(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.TemporaryDirectory()
self.model_path = os.path.join(self.temp_dir.name,
"layer_use_set_value")
def tearDown(self):
self.temp_dir.cleanup()
def test_set_value_with_save(self):
prog_trans.enable(True)
model = LayerWithSetValue(input_dim=10, hidden=1)
x = paddle.full(shape=[5, 10], fill_value=5.0, dtype="float32")
paddle.jit.save(layer=model,
path=self.model_path,
input_spec=[x],
output_spec=None)
class TestSliceSupplementSpecialCase(unittest.TestCase):
# unittest for slice indices where abs(step) > 1, e.g. x[::2]
def test_static_slice_step(self):
paddle.enable_static()
array = np.arange(4**3).reshape((4, 4, 4)).astype('int64')
x = paddle.static.data(name='x', shape=[4, 4, 4], dtype='int64')
z1 = x[::2]
z2 = x[::-2]
place = paddle.CPUPlace()
prog = paddle.static.default_main_program()
exe = paddle.static.Executor(place)
exe.run(paddle.static.default_startup_program())
out = exe.run(prog, feed={'x': array}, fetch_list=[z1, z2])
np.testing.assert_array_equal(out[0], array[::2])
np.testing.assert_array_equal(out[1], array[::-2])
def test_static_slice_step_dygraph2static(self):
paddle.disable_static()
array = np.arange(4**2 * 5).reshape((5, 4, 4)).astype('int64')
inps = paddle.to_tensor(array)
def func(inps):
return inps[::2], inps[::-2]
origin_result = func(inps)
sfunc = paddle.jit.to_static(func,
input_spec=[InputSpec(shape=[None, 4, 4])])
static_result = sfunc(inps)
np.testing.assert_array_equal(origin_result[0].numpy(),
static_result[0].numpy())
np.testing.assert_array_equal(origin_result[1].numpy(),
static_result[1].numpy())
class TestPaddleStridedSlice(unittest.TestCase):
def test_compare_paddle_strided_slice_with_numpy(self):
paddle.disable_static()
array = np.arange(5)
pt = paddle.to_tensor(array)
s1 = 3
e1 = 1
stride1 = -2
sl = paddle.strided_slice(pt,
axes=[
0,
],
starts=[
s1,
],
ends=[
e1,
],
strides=[
stride1,
])
np.testing.assert_array_equal(sl.numpy(), array[s1:e1:stride1])
array = np.arange(6 * 6).reshape((6, 6))
pt = paddle.to_tensor(array)
s2 = [8, -1]
e2 = [1, -5]
stride2 = [-2, -3]
sl = paddle.strided_slice(pt,
axes=[0, 1],
starts=s2,
ends=e2,
strides=stride2)
np.testing.assert_array_equal(
sl.numpy(), array[s2[0]:e2[0]:stride2[0], s2[1]:e2[1]:stride2[1]])
array = np.arange(6 * 7 * 8).reshape((6, 7, 8))
pt = paddle.to_tensor(array)
s2 = [7, -1]
e2 = [2, -5]
stride2 = [-2, -3]
sl = paddle.strided_slice(pt,
axes=[0, 2],
starts=s2,
ends=e2,
strides=stride2)
array_slice = array[s2[0]:e2[0]:stride2[0], ::, s2[1]:e2[1]:stride2[1]]
np.testing.assert_array_equal(sl.numpy(), array_slice)
def slice_zero_shape_tensor(x):
y = x[1:2]
return y
class TestSliceZeroShapeTensor(unittest.TestCase):
def test_slice(self):
paddle.disable_static()
x = paddle.ones([0, 0, 0, 0])
y = slice_zero_shape_tensor(x)
np.testing.assert_equal(y.shape, [0, 0, 0, 0])
static_func = paddle.jit.to_static(slice_zero_shape_tensor)
y = static_func(x)
np.testing.assert_equal(y.shape, [0, 0, 0, 0])
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
b43697e594bc2ebd2a22cf5aee7a1546b8119f53 | bfbb78b6e6c69644ba70709ad528eb450bcf0cf5 | /public/utils/wraps.py | a4e53c35ad111716201f0036cfb76f1e0a381ff1 | [
"MIT"
] | permissive | xiaoxiaolulu/AndroidAuto | 98b2248175e017d02f5da5b7d11f0f20d4f92dc8 | b8cdba360664f7d1c73b9ab290c7901eaf814f0c | refs/heads/master | 2020-04-12T15:36:25.554509 | 2018-12-20T13:58:06 | 2018-12-20T13:58:06 | 162,586,748 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ========================================================
# Module : wraps
# Author : Null
# Create Date : 11/11/2018
# Amended by : Null
# Amend History : 11/11/2018
# ========================================================
| [
"[email protected]"
] | |
61bea83fdb813ea82fcd443956c102d9bf5bc6e1 | d24cef73100a0c5d5c275fd0f92493f86d113c62 | /SRC/engine/skeleton.py | 0b58d6332707f5a4b9d108688304cda8dd1a450c | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | rlinder1/oof3d | 813e2a8acfc89e67c3cf8fdb6af6b2b983b8b8ee | 1fb6764d9d61126bd8ad4025a2ce7487225d736e | refs/heads/master | 2021-01-23T00:40:34.642449 | 2016-09-15T20:51:19 | 2016-09-15T20:51:19 | 92,832,740 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128,537 | py | # -*- python -*-
# $RCSfile: skeleton.py,v $
# $Revision: 1.346.2.6 $
# $Author: langer $
# $Date: 2014/09/17 21:26:56 $
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# [email protected].
# A Skeleton contains the geometry of a finite element mesh, without
# any of the details. It has lists of SkeletonElements,
# SkeletonNodes, and SkeletonEdgeBoundary (made up of SkeletonEdges).
# but no intermediate nodes, materials, or shape functions.
## THIS FILE IS NOT USED IN 3D AND SHOULD NOT BE IMPORTED
from ooflib.SWIG.common import config
assert config.dimension() == 2
from ooflib.SWIG.common import coord
from ooflib.SWIG.common import ooferror
from ooflib.SWIG.common import progress
from ooflib.SWIG.common import switchboard
from ooflib.SWIG.common import timestamp
from ooflib.SWIG.engine import femesh
from ooflib.common import debug
from ooflib.common import enum
from ooflib.common import microstructure
from ooflib.common import parallel_enable
from ooflib.common import primitives
from ooflib.common import registeredclass
from ooflib.common import runtimeflags
from ooflib.common import utils
from ooflib.common.IO import parameter
from ooflib.common.IO import reporter
from ooflib.common.IO import xmlmenudump
from ooflib.engine import skeletonboundary
from ooflib.engine import skeletoncontext
from ooflib.engine import skeletondiff
from ooflib.engine import skeletonelement
from ooflib.engine import skeletongroups
from ooflib.engine import skeletonnode
from ooflib.engine import skeletonsegment
from ooflib.engine import skeletonselectable
from ooflib.engine import materialmanager
import math
import random
import time
import types
import weakref
import sys
Registration = registeredclass.Registration
SkeletonNode = skeletonnode.SkeletonNode
PeriodicSkeletonNode = skeletonnode.PeriodicSkeletonNode
SkeletonEdgeBoundary = skeletonboundary.SkeletonEdgeBoundary
ExteriorSkeletonEdgeBoundary = skeletonboundary.ExteriorSkeletonEdgeBoundary
SkeletonPointBoundary = skeletonboundary.SkeletonPointBoundary
ExteriorSkeletonPointBoundary = skeletonboundary.ExteriorSkeletonPointBoundary
SkeletonEdge = skeletonsegment.SkeletonEdge
SkeletonQuad = skeletonelement.SkeletonQuad
SkeletonTriangle = skeletonelement.SkeletonTriangle
SkeletonSegment = skeletonsegment.SkeletonSegment
#PeriodicSkeletonSegment = skeletonsegment.PeriodicSkeletonSegment
# Triangular skeleton arrangements
class Arrangement(
enum.EnumClass(('conservative', 'leaning to the right'),
('liberal', 'leaning to the left'),
('moderate', 'going both ways'),
('middling', 'going both ways, the other way'),
('anarchic', 'going every which way'))):
tip = "Arrangement for triangular initial Skeleton."
discussion = xmlmenudump.loadFile('DISCUSSIONS/engine/enum/arrangement.xml')
utils.OOFdefine('Arrangement', Arrangement)
# Known arrangements.
conservative = Arrangement('conservative')
liberal = Arrangement('liberal')
moderate = Arrangement('moderate')
anarchic = Arrangement('anarchic')
middling = Arrangement('middling')
class SegmentData:
def __init__(self,matname,leftskelel,rightskelel,interfacename):
self._materialname=matname
self._leftskelel=leftskelel
self._rightskelel=rightskelel
self._interfacenames=[interfacename]
def setData(self,matname,leftskelel,rightskelel,interfacename):
self._materialname=matname
self._leftskelel=leftskelel
self._rightskelel=rightskelel
self._interfacenames.append(interfacename)
class SkeletonGeometry(registeredclass.RegisteredClass):
registry = []
def __init__(self, type):
self.type = type
tip = "Element shape for the initial Skeleton."
discussion = """<para>
<classname>SkeletonGeometry</classname> objects are used to
specify the shape of the &elems; in the uniform &skel; created by
<xref linkend='MenuItem-OOF.Skeleton.New'/>.
</para>"""
# much of the code needed by TriSkeleton and QuadSkeleton is repeated
def createGridOfNodes(self, skel, prog, m, n):
# both the QuadSkeleton and TriSkeleton begin with a
# rectangular grid of nodes
## TODO MER: Change names of boundaries Xmin, Ymax, etc. to be
## compatible with 3D.
btmlft = skel.getPointBoundary('bottomleft', exterior=1)
btmrgt = skel.getPointBoundary('bottomright', exterior=1)
toplft = skel.getPointBoundary('topleft', exterior=1)
toprgt = skel.getPointBoundary('topright', exterior=1)
## create nodes and selected point boundaries.
dx = (skel.MS.size()[0]*1.0)/m # Promote numerators to floating-point.
dy = (skel.MS.size()[1]*1.0)/n
tot_items = (m + 1)*(n + 1)
for i in range(n+1):
for j in range(m+1):
# set upper and right edges exactly to avoid roundoff
if i == n:
y = skel.MS.size()[1]
else:
y = i*dy
if j == m:
x = skel.MS.size()[0]
else:
x = j*dx
node = skel.newNode(x,y)
# set node partners
if self.top_bottom_periodicity and i==n:
node.addPartner(skel.getNode(j))
if self.left_right_periodicity and j==m:
node.addPartner(skel.getNode(i*(m+1)))
# set partners diagonal from each other as well
if self.left_right_periodicity and self.top_bottom_periodicity:
if i==n and j==0:
node.addPartner(skel.getNode(m))
if i==n and j==m:
node.addPartner(skel.getNode(0))
# add nodes to corner boundaries
if i==0 and j==0:
btmlft.addNode(node)
if i==0 and j==m:
btmrgt.addNode(node)
if i==n and j==0:
toplft.addNode(node)
if i==n and j==m:
toprgt.addNode(node)
if prog.stopped():
return
nn = i*(m+1)+j+1 # (m+1) nodes per row
prog.setFraction(1.0*nn/tot_items)
prog.setMessage("Allocated %d/%d nodes" % (nn, tot_items))
def addGridSegmentsToBoundaries(self, skel, i, j, m, n):
# both the QuadSkeleton and TriSkeleton add the same
# segments to the edge boundaries
if j == 0:
ll = i*(m+1) + j # lower left node index
ul = (i+1)*(m+1) + j # upper left
lft = skel.getEdgeBoundary('left', exterior=1)
segment = skel.segments[
skeletonnode.canonical_order(skel.nodes[ul],
skel.nodes[ll])]
edge = SkeletonEdge(segment)
edge.set_direction(skel.nodes[ul], skel.nodes[ll])
lft.addEdge(edge)
if j == (m-1):
lr = i*(m+1) + j+1 # lower right
ur = (i+1)*(m+1) + j+1 # upper right
rgt = skel.getEdgeBoundary('right', exterior=1)
segment = skel.segments[
skeletonnode.canonical_order(skel.nodes[lr],
skel.nodes[ur])]
edge = SkeletonEdge(segment)
edge.set_direction(skel.nodes[lr], skel.nodes[ur])
rgt.addEdge(edge)
## if self.left_right_periodicity:
## skel.makeSegmentPartners(segment,skel.segments[
## skeletonnode.canonical_order(skel.nodes[i*(m+1)],
## skel.nodes[(i+1)*(m+1)])])
if i == 0:
ll = i*(m+1) + j # lower left node index
lr = i*(m+1) + j+1 # lower right
btm = skel.getEdgeBoundary('bottom', exterior=1)
segment = skel.segments[
skeletonnode.canonical_order(skel.nodes[ll],
skel.nodes[lr])]
edge = SkeletonEdge(segment)
edge.set_direction(skel.nodes[ll], skel.nodes[lr])
btm.addEdge(edge)
if i == (n-1):
ur = (i+1)*(m+1) + j+1 # upper right
ul = (i+1)*(m+1) + j # upper left
top = skel.getEdgeBoundary('top', exterior=1)
segment = skel.segments[
skeletonnode.canonical_order(skel.nodes[ur],
skel.nodes[ul])]
edge = SkeletonEdge(segment)
edge.set_direction(skel.nodes[ur], skel.nodes[ul])
top.addEdge(edge)
class QuadSkeleton(SkeletonGeometry):
def __init__(self, left_right_periodicity=False,
top_bottom_periodicity=False):
SkeletonGeometry.__init__(self, 'quad')
self.left_right_periodicity = left_right_periodicity
self.top_bottom_periodicity = top_bottom_periodicity
def __call__(self, m, n, microStructure, preset_homog=0):
# Create a skeleton of quadrilateral elements, n rows by m
# columns. The width and height of the entire skeleton are h
# and w.
prog = progress.getProgress("Skeleton", progress.DEFINITE)
try:
skel = Skeleton(microStructure, self.left_right_periodicity,
self.top_bottom_periodicity)
skel.reserveNodes((m+1)*(n+1))
skel.reserveElements(m*n)
## create nodes and selected point boundaries.
self.createGridOfNodes(skel, prog, m, n)
## create elements and edges
tot_items = m*n
for i in range(n): # loop over rows of elements
for j in range(m): # loop over columns of elements
ll = i*(m+1) + j # lower left node index
lr = i*(m+1) + j+1 # lower right
ur = (i+1)*(m+1) + j+1 # upper right
ul = (i+1)*(m+1) + j # upper left
el = skel.newElement([skel.nodes[ll],skel.nodes[lr],
skel.nodes[ur],skel.nodes[ul]])
# Simple skeleton -- set the homogeneity to "1".
if preset_homog:
dom_pixel = microStructure.categoryFromPoint(
el.repr_position())
el.setHomogeneous(dom_pixel)
## el.cachedHomogData.value = cskeleton.HomogeneityData(
## 1.0,dom_pixel)
# Element constructors make the segments, we can use them
# to make the edges and add them to the boundaries.
#To create default boundaries as interfaces
#instead of skeleton boundaries, comment or remove
#the following line.
self.addGridSegmentsToBoundaries(skel, i, j, m, n)
el.findHomogeneityAndDominantPixel(skel.MS)
if prog.stopped():
return None
rectangle_count = i*m+j+1
prog.setFraction(float(rectangle_count)/tot_items)
prog.setMessage("Created %d/%d elements"
% (rectangle_count, tot_items))
return skel
finally:
prog.finish()
Registration(
'QuadSkeleton',
SkeletonGeometry,
QuadSkeleton,
0,
params=[parameter.BooleanParameter('left_right_periodicity',value=False,default=False,
tip="Whether or not the skeleton has periodicity in the horizontal direction"),
parameter.BooleanParameter('top_bottom_periodicity',value=False,default=False,
tip="Whether or not the skeleton has periodicity in the vertical direction")],
tip="A Skeleton of quadrilateral elements.",
discussion="""<para>
<classname>QuadSkeleton</classname> is used as the
<varname>skeleton_geometry</varname> argument of <xref
linkend='MenuItem-OOF.Skeleton.New'/>, specifying that it is to
create a &skel; with quadrilateral &elems;.
</para>""")
class TriSkeleton(SkeletonGeometry):
def __init__(self, arrangement=moderate, left_right_periodicity=False,
top_bottom_periodicity=False):
SkeletonGeometry.__init__(self, 'tri')
self.arrangement = arrangement
self.left_right_periodicity = left_right_periodicity
self.top_bottom_periodicity = top_bottom_periodicity
def __call__(self, m, n, microStructure, preset_homog=0):
prog = progress.getProgress("Skeleton", progress.DEFINITE)
try:
skel = Skeleton(microStructure, self.left_right_periodicity,
self.top_bottom_periodicity)
skel.reserveNodes((m+1)*(n+1))
skel.reserveElements(2*m*n)
## create nodes
self.createGridOfNodes(skel, prog, m, n)
## create elements and edges
tot_items = m*n
rightdiag = 1
for i in range(n): # loop over rows of elements
for j in range(m): # loop over columns of elements
ll = i*(m+1) + j # lower left node index
lr = i*(m+1) + j+1 # lower right
ur = (i+1)*(m+1) + j+1 # upper right
ul = (i+1)*(m+1) + j # upper left
if self.arrangement == conservative:
rightdiag = 1
elif self.arrangement == liberal:
rightdiag = 0
elif self.arrangement == moderate:
rightdiag = (i+j)%2
elif self.arrangement == middling:
rightdiag = 1-(i+j)%2
elif self.arrangement == anarchic:
rightdiag = random.choice([0,1])
else:
debug.fmsg('unknown arrangement!', self.arrangement)
# ul _________ ur
# | /|
# | el1 / |
# | / |
# | / |
# | / |
# | / |
# | / el2 |
# ll |/_______| lr
#
if rightdiag==1:
el1 = skel.newElement([skel.nodes[ll],skel.nodes[ur],
skel.nodes[ul]])
el2 = skel.newElement([skel.nodes[ur],skel.nodes[ll],
skel.nodes[lr]])
#
# ul _________ ur
# |\ |
# | \ |
# | \ el2 |
# | \ |
# | \ |
# | \ |
# | el1 \ |
# ll |_______\| lr
#
else:
el1 = skel.newElement([skel.nodes[lr],skel.nodes[ul],
skel.nodes[ll]])
el2 = skel.newElement([skel.nodes[ul],skel.nodes[lr],
skel.nodes[ur]])
# Simple skeleton -- set the homogeneity to "1".
if preset_homog:
# In the preset case, both elements are on the
# same pixel, so only do this once.
dom_pixel = microStructure.categoryFromPoint(
el1.repr_position())
el1.setHomogeneous(dom_pixel)
el2.setHomogeneous(dom_pixel)
# Element constructors make the segments, we can
# use them to make the edges and add them to the
# boundaries.
#To create default boundaries as interfaces
#instead of skeleton boundaries, comment or remove
#the following line.
self.addGridSegmentsToBoundaries(skel,i,j,m,n)
el1.findHomogeneityAndDominantPixel(skel.MS)
el2.findHomogeneityAndDominantPixel(skel.MS)
if prog.stopped():
return None
rectangle_count = i*m+j+1
prog.setFraction(float(rectangle_count)/tot_items)
prog.setMessage("Created %d/%d elements"
% (2*rectangle_count, 2*tot_items))
return skel
finally:
prog.finish()
Registration(
'TriSkeleton',
SkeletonGeometry,
TriSkeleton,
1,
params=[enum.EnumParameter('arrangement', Arrangement, moderate,
tip="How to arrange triangular elements in a Skeleton"),
parameter.BooleanParameter('left_right_periodicity',value=False, default=False,
tip="Whether or not the skeleton has periodicity in the horizontal direction"),
parameter.BooleanParameter('top_bottom_periodicity',value=False, default=False,
tip="Whether or not the skeleton has periodicity in the vertical direction")],
tip='A Skeleton of triangular elements.',
discussion="""<para>
<classname>TriSkeleton</classname> is used as the
<varname>skeleton_geometry</varname> argument of <xref
linkend='MenuItem-OOF.Skeleton.New'/>, specifying that it is to
create a &skel; with right triangular &elems;. The
<varname>arrangement</varname> describes how the hypotenuses of
the triangles are to be arranged.
</para>""" )
######################
# SkeletonBase is provided just so different kinds of skeletons
# (Skeleton, DeputySkeleton) can be checked for in a single
# isinstance() call.
class SkeletonBase:
def __init__(self):
self._illegal = 0
# Appears in the Skeleton Page
self.homogeneityIndex = None
self.illegalCount = None
# Keep track of when the skeleton geometry last changed, and
# when the homogeneity index was last updated. Geometry
# changes happen when new elements are added to the skeleton,
# or when they are detected in the
# findHomogeneityandDominantPixel routine of member skeleton
# elements.
self.homogeneity_index_computation_time = timestamp.TimeStamp()
self.homogeneity_index_computation_time.backdate()
self.most_recent_geometry_change = timestamp.TimeStamp()
self.illegal_count_computation_time = timestamp.TimeStamp()
self.illegal_count_computation_time.backdate()
def destroy(self):
pass
def updateGeometry(self):
self.most_recent_geometry_change.increment()
def setHomogeneityIndex(self):
# Tempting though it may be, do not lock the MS here. This
# can be called with the skeleton already locked, which
# implicitly locks the MS.
homogIndex = 0.0
illegalcount = 0
for e in self.elements:
if not e.illegal():
homogIndex += e.area()*e.homogeneity(self.MS)
else:
illegalcount += 1
homogIndex /= self.area()
self.illegalCount = illegalcount
self.homogeneityIndex = homogIndex
self.homogeneity_index_computation_time.increment()
self.illegal_count_computation_time.increment()
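# Editor's note (illustrative): the loop above computes the
# area-weighted average homogeneity over the legal elements,
#     H = (1/A_total) * sum_e A_e * h_e ,
# so H reaches 1.0 only when every element is legal and each one
# covers a single pixel category of the microstructure.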
def getIllegalCount(self):
if self.illegalCount is None or (self.illegal_count_computation_time
< self.most_recent_geometry_change):
illegalCount = 0
for e in self.elements:
if e.illegal():
illegalCount += 1
self.illegalCount = illegalCount
self.illegal_count_computation_time.increment()
return self.illegalCount
def getIllegalElements(self):
return [e for e in self.elements if e.illegal()]
def getHomogeneityIndex(self):
if (self.homogeneity_index_computation_time < self.MS.getTimeStamp()
or self.homogeneity_index_computation_time <
self.most_recent_geometry_change):
self.setHomogeneityIndex()
return self.homogeneityIndex
# Utility function, finds all the intersections of passed-in
# segment (a primitives.Segment object) with the passed-in
# skeleton element. Needs the skeleton object in order to extract
# skeleton segments. Returns a dictionary, indexed by intersection
# points, whose values are the lists of skeleton segments that
# intersect the given segment at each point.
def _get_intersections_with_element(self, local_seg, skel_el):
skel_segs = skel_el.getSegments(self)
isec_set = {}
for s in skel_segs:
nds = s.nodes()
c1 = nds[0].position()
c2 = nds[1].position()
seg = primitives.Segment(c1,c2)
isec = local_seg.intersection(seg)
if isec:
try:
isec_set[isec].append(s)
except KeyError:
isec_set[isec]=[s]
return isec_set
# Element traversal function -- given a skeleton element, local
# segment, and entry point, returns either (None, None) if the
# segment terminates inside the current element, or otherwise, the
# next element along the segment.
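# A minimal usage sketch (editor's illustration; the real caller is
# enclosingElement(), below):
#
#   entry, el = None, starting_element
#   while el is not None:
#       last_el = el
#       (entry, el) = skel.get_intersection_and_next_element(
#           straw_segment, el, entry)
#   # last_el is now the element containing straw_segment.end()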
def get_intersection_and_next_element(self, local_seg, skel_el, entry):
# First, see if we're already done -- if the current element
# encloses the trailing point of the local segment, there is
# no next element.
if skel_el.interior(local_seg.end()):
return (None, None)
# Find all the intersections with this element.
isec_set = self._get_intersections_with_element(local_seg, skel_el)
# Remove the intersection we already know about -- it's not an
# allowed "exit" intersection.
## TODO MER: The keys in the isec_set dictionary need to be
## tuples containing both the intersection point and the
## element from which the traversal is arriving. This
## function needs to know that previous element as well. If
## the current element is degenerate, the entry point and exit
## point might be identical, so using the entry point alone as
## a key is insufficient. This may have been fixed already.
if entry:
del isec_set[entry]
# If there is no exit intersection, but the interiority check
# on the end-point failed (i.e. it gave the result "exterior"
# for the end-point of local_seg), then the end-point must be
# within round-off of the boundary of the element. In this
# case, there is again no next element.
if len(isec_set)==0:
return (None, None)
# Now the intersection set must be of length one, consisting
# of the "exit wound". If it's not 1 (and not zero, above)
# then something has gone horribly wrong. Throw an exception.
## TODO MER: The horribly wrong situation must be that the element
## is an illegal chevron shaped quad. We could handle such
## quads if they were split into two triangles (at least one
## of which would be illegal, but the algorithm works on
## illegal triangles).
if len(isec_set) !=1:
raise ooferror.ErrPyProgrammingError(
"Segment exits element multiple times.")
isec_point = isec_set.keys()[0]
isec_segs = isec_set.values()[0]
# Now get the *segments* corresponding to the exit point.
# If there's one, then this is the generic case.
if len(isec_segs)==1:
next_el = isec_segs[0].getOtherElement(skel_el)
# If there are two segments, then we exit through a corner.
elif len(isec_segs)==2:
# Find the node common to the two segments.
nds1 = isec_segs[0].nodes()
nds2 = isec_segs[1].nodes()
for n in nds1:
if n in nds2:
common_node = n
break
# Find all the elements connected to this corner which
# have new intersections.
corner_elements = common_node.aperiodicNeighborElements()
new_elements = []
for e in [x for x in corner_elements if x!=skel_el]:
new_isecs = self._get_intersections_with_element(
local_seg, e)
try:
# Remove the one we already know about.
del new_isecs[isec_point]
except KeyError:
pass
# Also remove any new intersections which precede
# isec_point along the local segment. These can sneak
# in in the case where the local segment intersects
# two segments in the current element, but only one in
# the previous element -- in that case, the current
# element will find an exit intersection in the prior
# element, and incorrectly select it as next, leading
# to an infinite loop.
kill_list = []
mp = (isec_point-local_seg.start())**2
for k in new_isecs:
mk = (k-local_seg.start())**2
if mk < mp:
kill_list.append(k)
for k in kill_list:
del new_isecs[k]
# For elements where valid intersections occur,
# add them to the list of candidates.
if len(new_isecs)>0:
new_elements.append(e)
# If we found zero elements, then the cross-section must
# terminate inside one of these. Figure out which one
# by testing interiority.
if len(new_elements)==0:
for e in [x for x in corner_elements if x!=skel_el]:
if e.interior(local_seg.end()):
next_el = e
break
else:
raise ooferror.ErrPyProgrammingError("get_intersection_and_next_element failed, case 0")
# If we found exactly one element, then just return it.
elif len(new_elements)==1:
next_el = new_elements[0]
else:
# If there's more than one, then the local segment
# must pass directly between two elements along their
# shared skeleton segment. Pick the one on the right,
# which is the one whose center has a positive cross
# product with the segment itself, viewed from the
# start of the segment.
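# Worked check (editor's illustration, assuming the usual 2-D
# cross product v1.x*v2.y - v1.y*v2.x): walking along
# v2 = (1, 0), an element centered just below the segment has
# v1 = (0, -1), giving v1.cross(v2) = 0*0 - (-1)*1 = +1 > 0,
# so the element on the right of the walk direction is picked.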
for e in new_elements:
v1 = e.center()-local_seg.start()
v2 = local_seg.end()-local_seg.start()
if v1.cross(v2)>0:
next_el = e
break
else:
raise ooferror.ErrPyProgrammingError("get_intersection_and_next_element failed, case n")
else:
# Impossible!
raise ooferror.ErrPyProgrammingError(
"Linear path crosses an element more than twice, or fewer than zero times.")
return (isec_point, next_el)
def enclosingElement(self, point):
# Find the element containing the given point.
# Start at the last element found (if any). Draw a line
# between the center of the element and the target point.
# Find which side of the element intersects the line, and move
# to the neighboring element across that side. Repeat. If no
# side crosses the line, the current element is the one we're
# looking for.
# The algorithm relies on starting inside an element, any
# element. We use the last element found because it's likely
# to be near the element that the user is interested in.
# However, if the last element is illegal, its inside may not
# be well defined. Since we use the element's center to find
# an interior point, and the center of an illegal element may
# be outside of the element, we don't start at illegal
# elements.
# _found_element is either None or a weak reference to the
# last el found
el = self._found_element and self._found_element()
# If the (clicked) point is outside of the skeleton,
# this scheme doesn't work. So, here's a reasonable fix to that.
if point.x < 0.0:
point[0] = 0.0
if point.x > self.MS.size()[0]:
point[0] = self.MS.size()[0]
if point.y < 0.0:
point[1] = 0.0
if point.y > self.MS.size()[1]:
point[1] = self.MS.size()[1]
# If we don't have a good starting point, we look for one.
if el is None or el.illegal():
for ell in self.elements:
if not ell.illegal():
el = ell
break
else:
raise ooferror.ErrSetupError("All elements are illegal!")
center = el.center()
straw = primitives.Segment(center, point)
entry = None
while(el):
last_el = el
(entry, el) = self.get_intersection_and_next_element(
straw, last_el, entry)
self._found_element = weakref.ref(last_el)
return last_el
def nearestSgmt(self, point):
# Local function to compute the distance between a point pt
# and segment. The return value is a tuple whose first entry
# is the distance squared from the point to the closest point
# on the segment. If the projection of the point onto the
# line of the segment is *not* on the segment, the second
# member of the tuple is the distance squared along the line
# from the projected point to the closest endpoint of the
# segment. Simply comparing tuples then gives the closest
# segment to the given point.
def distance(pt, segment):
nodes = segment.nodes()
p0 = nodes[0].position()
p1 = nodes[1].position()
a = pt-p0
b = p1-p0
seglength2 = b**2
if seglength2 == 0:
return (a**2, 0.0)
f = ((a*b)/seglength2) # projection's fractional distance from p0
if f < 0:
alpha = -f
r = pt - p0
elif f > 1:
alpha = f-1
r = pt - p1
else:
r = a-f*b
alpha = 0
return (r**2, alpha*alpha*seglength2)
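# Worked examples (editor's illustration) for the unit segment
# p0=(0,0), p1=(1,0): pt=(0,2) projects onto the segment (f=0),
# so distance() returns (4.0, 0.0); pt=(2,1) projects past p1
# (f=2, alpha=1, r=pt-p1=(1,1)), giving (2.0, 1.0).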
# First, see if the point is inside an element. If it is, the
# closest segment must be one of the element's edges.
element = self.enclosingElement(point)
if element is not None:
mindist = None
nearseg = None
for i in range(element.nnodes()):
node0 = element.nodes[i]
node1 = element.nodes[(i+1)%element.nnodes()]
seg = self.getSegment(node0, node1)
d = distance(point, seg)
if mindist is None or d < mindist:
mindist = d
nearseg = seg
else:
# Not inside any element. Search all boundary segments.
mindist = None
nearseg = None
for seg in self.segments.values():
if seg.nElements() == 1: # it's a boundary segment
d = distance(point, seg)
if mindist is None or d < mindist:
mindist = d
nearseg = seg
return nearseg
## Call setIllegal() after creating an illegal element.
def setIllegal(self):
self._illegal = 1
def illegal(self):
return self._illegal
## Call checkIllegality after any operation that may have changed
## an illegal skeleton into a legal one. Operations that change
## legal skeletons into illegal ones should be able to check
## legality themselves (without searching all elements) and should
## call setIllegal() directly.
def checkIllegality(self):
for el in self.elements:
if el.illegal():
self._illegal = 1
return
self._illegal = 0
def countShapes(self):
shapecounts = {}
for name in skeletonelement.ElementShapeType.names:
shapecounts[name] = 0
for e in self.elements:
shapecounts[e.type().name] += 1
return shapecounts
#####################
# The Skeleton contains the geometrical information for a mesh,
# without any of the complications of nodes, shapefunctions, or
# materials.
# Skeleton objects live in a SkeletonContext stack, and many of their
# operations are invoked via the Context, which does important
# bookkeeping.
class Skeleton(SkeletonBase):
def __init__(self, microStructure, left_right_periodicity=False,
top_bottom_periodicity=False):
SkeletonBase.__init__(self)
self.MS = microStructure # Microstructure object, not context
self._size = self.MS.size()
self._area = self._size[0]*self._size[1]
self.nodemovehistory = skeletondiff.NodeMoveHistory()
self.elements = utils.ReservableList()
self._found_element = None # used in enclosingElement().
self.nodes = utils.ReservableList()
self.segments = {} # Nondirected edges.
self.edgeboundaries = {}
self.pointboundaries = {}
self.timestamp = timestamp.TimeStamp()
self.left_right_periodicity = left_right_periodicity
self.top_bottom_periodicity = top_bottom_periodicity
# When elements and nodes are deleted from the mesh, they
# aren't immediately removed from the lists in the Skeleton.
# They're only removed when cleanUp() is called. washMe
# indicates whether or not cleanUp() is necessary.
self.washMe = 0
self.hashedNodes = None
self.deputy = None # currently active DeputySkeleton
self.deputylist = [] # all deputies
self._deferreddestruction = 0
self._destroyed = 0
self.setIndexBase() # Default is to start from zero.
        # We decided not to meddle with the existing indexing, and
        # also not to introduce a new indexing scheme.
# Instead, dictionaries -- ex) node.index: index in self.nodes
# With this a node can be fetched with "index" efficiently,
# which is crucial in parallel mode.
# ex) def getNodeWithIndex(index):
# return self.nodes[node_index_dict[index]]
        # The first values in these dictionaries always start from 0,
        # unsurprisingly.
# It could be useful in serial mode too, but at this point
# these are only maintained in parallel mode.
self.node_index_count = 0
self.elem_index_count = 0
self.node_index_dict = {}
self.elem_index_dict = {}
self.cachedHomogeneities = {}
# geometric info of all Skeletons (in parallel mode)
if parallel_enable.enabled():
self.all_skeletons = None
# Current index data for each of the three types of skeleton
# objects are stored here -- these get incremented when new
# objects of the indicated type are created in this skeleton.
# These indices should start at zero, and proceed contiguously
# within a skeleton context.
def setIndexBase(self, node_index_base=0,
segment_index_base=0,
element_index_base=0):
self.node_index = node_index_base
self.segment_index = segment_index_base
self.element_index = element_index_base
# Used in parallel mode -- these will not be changed
if parallel_enable.enabled():
self.node_index0 = node_index_base
self.segment_index0 = segment_index_base
self.element_index0 = element_index_base
def reserveElements(self, n):
# self.elements.reserve(n)
pass
def reserveNodes(self, n):
self.nodes.reserve(n)
def isDeputy(self):
return 0
def clearCachedHomogeneities(self):
self.cachedHomogeneities = {}
def destroy(self, skelcontext):
SkeletonBase.destroy(self)
## NOTE: destroy() may be called more than once, if the
## Skeleton has deputies. If the Skeleton has deputies when
## it is destroyed, the _deferreddestruction flag is set, and
## destroy() will be called again when the last deputy is
## destroyed. That means that destroy() can't leave lists of
## destroyed objects lying around -- it must actually replace
## the lists with empty lists.
self._destroyed = True # see NOTE above
# for el in self.elements:
# el.destroy(self)
# self.elements = []
# self.nodes = []
# self.hashedNodes = None
# Any data shared with deputies must not be deleted until the
# deputies are done with it.
if self.ndeputies() == 0:
for el in self.elements:
el.destroy(self)
self.elements = []
self.nodes = []
self.hashedNodes = None
for ebdy in skelcontext.edgeboundaries.values():
ebdy.remove(self)
for pbdy in skelcontext.pointboundaries.values():
pbdy.remove(self)
del self.MS
self.disconnect()
else:
self._deferreddestruction = 1
def destroyed(self):
return self._destroyed
def __repr__(self):
return 'Skeleton(%d)' % id(self)
def disconnect(self):
for s in self.nodes + self.segments.values() + self.elements:
s.disconnect()
def getTimeStamp(self):
return self.timestamp
## return max(self.timestamp, self.MS.getTimeStamp())
def hashNodes(self):
# Computing preliminary information
nnodes = self.nnodes() # no. of nodes
x_size = self.size()[0]
y_size = self.size()[1]
ratio = x_size/y_size # aspect ratio of skeleton
if ratio < 1.0:
ratio = 1.0/ratio
nndtile = int( 0.5*math.sqrt(nnodes)*ratio ) # no. of nodes per tile
ntiles = int( nnodes/nndtile ) # no. of tiles
if x_size >= y_size:
nx = int( math.sqrt(ratio*ntiles) ) # tiles in the x-direction
ny = int( (1.0*nx)/ratio ) # tiles in the y-direction
else:
ny = int( math.sqrt(ratio*ntiles) )
nx = int( (1.0*ny)/ratio )
# For large ratios, the int can yield zero. Fix these.
if nx==0: nx=1
if ny==0: ny=1
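        # Hedged worked example: nnodes=1000 on a 2x1 skeleton gives
        # ratio=2, nndtile=int(0.5*sqrt(1000)*2)=31, ntiles=32, and
        # (since x_size >= y_size) nx=int(sqrt(2*32))=8, ny=4: an 8x4
        # grid of tiles holding roughly 31 nodes apiece.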
self.hashedNodes = skeletonnode.HashedNodes((nx,ny), self.size())
self.hashedNodes.hash(self)
def needsHash(self):
self.hashedNodes = None
def nnodes(self):
self.cleanUp()
return len(self.nodes)
    def nelements(self): # for compatibility w/ Element output
self.cleanUp()
return len(self.elements)
    def element_iterator(self): # for compatibility w/ Element output
self.cleanUp()
return self.elements
def node_iterator(self):
self.cleanUp()
return self.nodes
def segment_iterator(self):
self.cleanUp()
return self.segments.values()
# This returns the position in the skeleton's node list
# which is not the same as node.getIndex()
# Is this used?
## def getNodeIndex(self, node):
## return self.nodes.index(node)
def notPinnedNodes(self):
return [n for n in self.node_iterator() if not n.pinned()]
def getElementIndex(self, elem):
return self.elements.index(elem)
def nillegal(self):
n = 0
for e in self.elements:
if e.illegal():
n += 1
return n
# Returns a tuple containing maximum x-extent and maximum y-extent
# of the skeleton.
def size(self):
return self._size
def area(self):
return self._area
def newNodeFromPoint(self, point):
return self.newNode(point.x, point.y)
def newNode(self, x, y):
if (self.left_right_periodicity and (x == 0.0 or x == self.size()[0])) \
or (self.top_bottom_periodicity and
(y == 0.0 or y == self.size()[1])):
c = PeriodicSkeletonNode(x,y, index=self.node_index)
else:
c = SkeletonNode(x,y, index=self.node_index)
self.node_index += 1
if x == 0.0 or x == self.size()[0]:
c.setMobilityX(0)
if y == 0.0 or y == self.size()[1]:
c.setMobilityY(0)
self.nodes.append(c)
if parallel_enable.enabled():
self.node_index_dict[c.index] = self.node_index_count
self.node_index_count += 1
return c
# Only elements with nonzero area are constructed here. The
# line-elements are expected to be done in the boundary code.
def newElement(self, nodes, parents=[]):
nnodes = len(nodes)
if nnodes==3:
el = SkeletonTriangle(nodes, self.element_index)
self.element_index += 1
elif nnodes==4:
el = SkeletonQuad(nodes, self.element_index)
self.element_index += 1
else:
raise ooferror.ErrPyProgrammingError(
"Unable to construct %d-noded element." % nnodes)
self.elements.append(el)
for parent in parents:
el.add_parent(parent)
parent.add_child(el)
# Add this element's edges to the dictionary of segments.
# fetchSegment returns an existing SkeletonSegment, or makes one
# if necessary.
lastnode = nodes[-1]
for node in nodes:
segment = self.fetchSegment(lastnode, node)
segment.addElement(el)
lastnode = node
self.updateGeometry()
if parallel_enable.enabled():
self.elem_index_dict[el.index] = self.elem_index_count
self.elem_index_count += 1
return el
def loadElement(self, *indices):
nodes = [self.nodes[index] for index in indices]
return self.newElement(nodes)
def loadEdge(self, node0, node1):
seg = self.getSegment(node0, node1)
edge = skeletonsegment.SkeletonEdge(seg)
if seg.nodes()[0] == node0:
edge.direction = 1
else:
edge.direction = -1
return edge
def removeElements(self, *elements):
self.washMe = 1
for el in elements:
el.defunct = 1
el.destroy(self)
def removeNode(self, node):
# Called only by SkeletonNode.destroy() which is called by
# SkeletonNode.removeElement() when the node's last element is
# removed.
self.washMe = 1
node.defunct = 1
# Need to update self.node_index_dict in parallel mode
if parallel_enable.enabled():
node_index = node.getIndex()
list_index = self.node_index_dict[node_index]
# "node_index" will be deleted from the dict and all the nodes
del self.node_index_dict[node_index]
affected_nodes = [self.nodes[i].getIndex()
for i in range(list_index+1, self.nnodes())]
for an in affected_nodes:
self.node_index_dict[an] -= 1
self.node_index_count -= 1
def cleanUp(self):
if self.washMe:
self.elements = filter(lambda e: not hasattr(e, 'defunct'),
self.elements)
self.nodes = filter(lambda n: not hasattr(n, 'defunct'), self.nodes)
self.washMe = 0
def getElement(self, index):
return self.elements[index]
def getElementWithIndex(self, index):
return self.elements[ self.elem_index_dict[index] ]
# This returns a node based on its position in the skeleton's
# node list.
def getNode(self, index):
return self.nodes[index]
# # This returns a node based on its unique index number: the inverse
# # operation of node.getIndex().
# ## TODO MER: change name of "index" to distinguish between list
# ## position and unique ID - nodeID for instance... this indexing
# ## is only maintained in parallel... so it's commented out.
# def getNodeWithIndex(self, index):
# return self.nodes[ self.node_index_dict[index] ]
# getSegment returns an existing segment joining the given nodes,
# or creates a segment if such a segment doesn't already exist.
def getSegment(self, node0, node1):
nodes = skeletonnode.canonical_order(node0, node1)
try:
return self.segments[nodes]
except KeyError:
segment = SkeletonSegment(nodes, self.segment_index)
self.segment_index += 1
self.segments[nodes] = segment
return segment
# fetchSegment is just like getSegment, but it should be faster in
# the case where the Segment is likely *not* to be in the
# dictionary already. (Is this a significant optimization?)
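    # (A plausible answer: getSegment's try/except is cheapest when
    # the segment usually exists already, while fetchSegment's
    # explicit membership test avoids raising an exception in the
    # common miss case, as during initial element construction.)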
def fetchSegment(self, node0, node1):
nodes = skeletonnode.canonical_order(node0, node1)
if nodes in self.segments:
return self.segments[nodes]
segment = SkeletonSegment(nodes, self.segment_index)
self.segment_index += 1
self.segments[nodes] = segment
return segment
# findSegment returns an existing segment joining the given nodes,
# or None if such a segment doesn't exist.
def findSegment(self, node0, node1):
try:
return self.segments[skeletonnode.canonical_order(node0, node1)]
except KeyError:
return None
def removeSegment(self, key):
# Called only by SkeletonSegment.destroy(), which is called by
# SkeletonSegment.removeElement when the segment's last
# element is removed.
del self.segments[key]
# Geometry comparison function -- returns 0 if this skeleton has
# the same size, area, elements, segments, and boundaries as the
# other, and if all the nodes are within tolerance of the
# positions of the other; otherwise returns a string describing
# what went wrong. Note that these objects must not only be
# topologically equivalent, but must also be indexed the same for
# the comparison to succeed. Does not care about the skeleton
# name, or about microstructure stuff like pixels, or about group
# membership or selection status. The former is properly the
# responsibility of the microstructure, and the latter the
# responsibility of the skeleton context.
def compare(self, other, tolerance):
if self._size != other._size:
return "Size mismatch"
if self._area != other._area:
return "Area mismatch"
if len(self.elements)!=len(other.elements):
return "Element count mismatch"
if len(self.segments)!=len(other.segments):
return "Segment count mismatch"
if len(self.nodes)!=len(other.nodes):
return "Node count mismatch"
# Make sure elements have the same node indices. The elements
# might not be in the same order in the two Skeletons, so we
# can't just compare the node indices in the elements one by
# one. Instead, compare *sorted* lists of lists of node
# indices, one (inner) list for each element.
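        # For example (illustrative values): node-index lists
        # [[0,1,2],[2,3,0]] and [[2,3,0],[0,1,2]] compare equal after
        # sorting, even though the elements were created in different
        # orders.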
enodes = [ [n.index for n in e.nodes] for e in self.elements]
onodes = [ [n.index for n in e.nodes] for e in other.elements]
enodes.sort()
onodes.sort()
if enodes != onodes:
return "Element node indexing mismatch"
# Make sure segments have the same node indices. Segments are
# stored in a dictionary keyed by node pairs, so there's no
# need to worry about segment order.
for (s1,s2) in zip(self.segments.values(), other.segments.values()):
if [x.index for x in s1.nodes()]!=[x.index for x in s2.nodes()]:
return "Segment node indexing mismatch"
# Basic topology is right, now quantitatively check node locations.
tol2 = tolerance**2
for (n1,n2) in zip(self.nodes, other.nodes):
if (n1.position()-n2.position())**2 > tol2:
return "Node outside of tolerance, %s-%s=%s" % \
(`n1.position()`, `n2.position()`,
`n1.position()-n2.position()`)
if len(self.edgeboundaries)!=len(other.edgeboundaries):
return "Edge boundary count mismatch"
if len(self.pointboundaries)!=len(other.pointboundaries):
return "Point boundary count mismatch"
# The boundary tests do *not* assume that the boundaries are
# in the same order in the two skeletons.
for key, b1 in self.edgeboundaries.items():
try:
b2 = other.edgeboundaries[key]
except KeyError:
return "Edge boundary name mismatch: %s" % key
if b1.size()!=b2.size():
return "Edge boundary size mismatch: %s" % key
for (e1,e2) in zip(b1.edges, b2.edges):
if [x.index for x in e1.get_nodes()] != \
[x.index for x in e2.get_nodes()]:
return "Edge boundary node mismatch: %s" % key
for key, b1 in self.pointboundaries.items():
try:
b2 = other.pointboundaries[key]
except KeyError:
return "Point boundary name mismatch: %s" % key
if b1.size()!=b2.size():
return "Point boundary size mismatch: %s" % key
for (n1,n2) in zip(b1.nodes, b2.nodes):
if n1.index != n2.index:
return "Point boundary node index mismatch: %s" % key
return 0 # Success!
def properCopy(self, skeletonpath=None, fresh=False):
# Copy the current skeleton properly so that the new skeleton
# and the current skeleton are totally independent. If
# "fresh" is True, then node, segment, and element indices
# start at zero. Otherwise, index-base data is retrieved from
# the skeleton context. "fresh" will be true during adaptive
# mesh refinement.
# The only time skeletonpath is used is when fresh==False, and
# only to be able to call setIndexBase. If fresh==True, there
# is no need to supply skeletonpath.
self.cleanUp()
# create a new skeleton
newSkeleton=Skeleton(self.MS)
if not fresh:
context = skeletoncontext.skeletonContexts[skeletonpath]
newSkeleton.setIndexBase(*context.next_indices)
# Make new nodes which have different indices, but are children
# of the old nodes.
#newSkeleton.nodes = []
for n in self.nodes:
newSkeleton.nodes.append(n.copy_child(newSkeleton.node_index))
newSkeleton.node_index += 1
newSkeleton.left_right_periodicity = self.left_right_periodicity
newSkeleton.top_bottom_periodicity = self.top_bottom_periodicity
# rebuild the node partnerships - must be done in separate loop
# after all nodes are created
for n in self.nodes:
for p in n.getPartners():
n.getChildren()[-1].addPartner(p.getChildren()[-1])
#Copy the following information also:
#SkeletonNode _shared_with = [] # except me
# _remote_index = {} # procID : remote index
if parallel_enable.enabled():
for n1,n2 in zip(self.nodes,newSkeleton.nodes):
n2._shared_with=n1._shared_with[:]
n2._remote_index=n1._remote_index.copy()
# newSkeleton.nodes = [ n.copy_child() for n in self.nodes ]
newSkeleton.elements = []
for e in self.elements:
newSkeleton.elements.append(
e.copy_child(newSkeleton.element_index))
newSkeleton.element_index += 1
# newSkeleton.elements = [ e.copy_child() for e in self.elements ]
# Make new segments which have equivalent indices (with new
# nodes), but are children of the old segments.
for s in self.segments.values():
new_seg = s.copy_child(newSkeleton.segment_index)
newSkeleton.segment_index += 1
newSkeleton.segments[new_seg.nodes()] = new_seg
newSkeleton._illegal = self._illegal
return newSkeleton
def improperCopy(self, skeletonpath=None, fresh=False):
# Copy a Skeleton, but *not* the elements or segments. Just
# nodes. Used when refining, where the elements and segments
# are recreated by Refine.apply().
# If "fresh" is True, then node, segment, and element indices
# start at zero. Otherwise, index-base data is retrieved from
# the skeleton context. The only time skeletonpath is used is
# when fresh==False, and only to be able to call
# setIndexBase. If fresh==True, there is no need to supply
# skeletonpath.
self.cleanUp()
newSkeleton = Skeleton(self.MS)
if not fresh:
context = skeletoncontext.skeletonContexts[skeletonpath]
newSkeleton.setIndexBase(*context.next_indices)
for n in self.nodes:
newSkeleton.nodes.append(n.copy_child(newSkeleton.node_index))
newSkeleton.node_index += 1
newSkeleton.left_right_periodicity = self.left_right_periodicity
newSkeleton.top_bottom_periodicity = self.top_bottom_periodicity
# rebuild the node partnerships - must be done in separate loop
# after all nodes are created
for n in self.nodes:
for p in n.getPartners():
n.getChildren()[-1].addPartner(p.getChildren()[-1])
newSkeleton._illegal = self._illegal
# In parallel mode, node keeps a dict of remote indices,
# {rank: remote_index, ...}. This dict has been copied over but
# it's useless -- the copied skeleton has new indices for nodes.
if parallel_enable.enabled():
from ooflib.SWIG.common import mpitools
offsets = mpitools.Allgather_Int(newSkeleton.node_index0 - \
self.node_index0)
for on, nn in zip(self.nodes, newSkeleton.nodes):
newSkeleton.node_index_dict[nn.index] = \
newSkeleton.node_index_count
newSkeleton.node_index_count += 1
if on.isShared():
for rank, index in on._remote_index.items():
nn.sharesWith(rank, index + offsets[rank])
return newSkeleton
###################
# The following routines are redefined in the DeputySkeleton
# class. A DeputySkeleton is a skeleton that differs from another
# skeleton only in the position of its nodes. See
# engine/deputy.py.
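    # Hedged sketch of the pattern:
    #   dep = skel.deputyCopy()   # records node moves only
    #   dep.sheriffSkeleton()     # --> skel, owner of the topology
    # A deputy shares its sheriff's nodes, segments, and elements,
    # so copies are cheap when only node positions change.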
def getIndexBase(self):
return (self.node_index, self.segment_index, self.element_index)
def deputyCopy(self):
from ooflib.engine import deputy # delayed import to avoid loops
# A "copy" that doesn't actually make a copy, but just keeps
# track of which nodes have been moved.
return deputy.DeputySkeleton(self)
def sheriffSkeleton(self): # The sheriff isn't the deputy
return self
def deputize(self, deputy): # install a new deputy
if self.deputy:
self.deputy.deactivate()
self.deputy = deputy
def addDeputy(self, dep):
# Called by DeputySkeleton.__init__()
self.deputylist.append(dep)
def removeDeputy(self, dep, skelcontext):
# Called by DeputySkeleton.destroy()
self.deputylist.remove(dep)
if self._deferreddestruction and self.ndeputies() == 0:
self.destroy(skelcontext)
def ndeputies(self):
return len(self.deputylist)
def activate(self):
if self.deputy:
self.deputy.deactivate()
self.deputy = None
def moveNodeTo(self, node, position):
node.moveTo(position)
for partner in node.getPartners():
partner.moveTo(position)
def moveNodeBy(self, node, delta):
node.moveBy(delta)
for partner in node.getPartners():
partner.moveBy(delta)
def moveNodeBack(self, node):
node.moveBack()
for partner in node.getPartners():
partner.moveBack()
def getMovedNodes(self):
return {}
def nodePosition(self, node):
# Gets the position of the node in this skeleton even if a
# deputy is active.
if self.deputy:
return self.deputy.originalPosition(node)
return node.position()
def newSelectionTracker(self, selectionset):
return skeletonselectable.SelectionTracker()
def newGroupTracker(self, groupset):
return skeletongroups.GroupTracker()
def newPinnedNodeTracker(self):
return skeletonnode.PinnedNodeTracker(self)
def promoteTrackers(self, context):
pass
#######################
def weightedEnergyTotal(self, alpha):
self.cleanUp()
return reduce(lambda x,y: x+y,
[el.area()*el.energyTotal(self, alpha)
for el in self.elements])
def energyTotal(self, alpha):
self.cleanUp()
total = 0.
for el in self.elements:
total += el.energyTotal(self, alpha)
return total
def illegalElements(self):
return [e for e in self.elements if e.illegal()]
def activeElements(self):
self.cleanUp()
return [e for e in self.elements if e.active(self)]
def activeNodes(self):
self.cleanUp()
return [n for n in self.nodes if n.active(self)]
def activeSegments(self):
self.cleanUp()
return [s for s in self.segments.values() if s.active(self)]
def nearestNode(self, point):
if self.hashedNodes is None:
self.hashNodes()
return self.hashedNodes.nearestNode(point)
#################
def mergeNodePairs(self, *pairs):
# Create a ProvisionalChanges object for merging the given
# pairs of nodes in the Skeleton. The arguments are 2-tuples
# of SkeletonNodes. The first node in each pair moves to and
# merges with the second one.
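        # Hedged usage sketch:
        #   change = skeleton.mergeNodePairs((nodeA, nodeB))
        # moves nodeA onto nodeB; the caller inspects the returned
        # ProvisionalChanges object (None means the merge was
        # rejected) before committing it.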
# Check the legality of the merges (as far as node mobility
# goes).
for pair in pairs:
if not pair[0].canMergeWith(pair[1]):
return None
# Set of nodes that are moving
movingNodes = set([pair[0] for pair in pairs])
# List of segments that will vanish
doomedSegments = [self.findSegment(*pair) for pair in pairs]
        # Set of all pairs -- this is just the 'pairs' argument, but
# will be extended to include periodic partners.
mergingPairs = set(pairs)
# Include periodic partners of the merging nodes.
for pair in pairs:
partners = pair[0].getPartnerPair(pair[1])
if partners:
movingNodes.add(partners[0])
mergingPairs.add(partners)
doomedSegments.append(self.findSegment(*partners))
# Find the topologically changing elements. These are
# elements that have a doomed segment as a side.
topElements = set()
for seg in doomedSegments:
topElements.update(seg.getElements())
# Find the elements that don't change topology, but do change
# shape.
isoElements = set(
[elem for node in movingNodes
for elem in node.aperiodicNeighborElements()])
isoElements -= topElements
# Temporarily move nodes to their final positions to check the
# legality of the elements whose topology doesn't change. The
# legality of the topologically changing elements can't be
# checked here, because moving a node will make the element
# illegal.
for pair in mergingPairs:
pair[0].moveTo(pair[1].position())
try:
for elephant in isoElements:
if elephant.illegal():
return None
finally:
# Make sure to move the nodes back, because the merge may
# still be rejected.
for pair in mergingPairs:
pair[0].moveBack()
change = ProvisionalMerges(self, *pairs)
# Replace the non-topologically changing elements with new
# ones, substituting nodes as necessary.
for oldelement in isoElements:
newnodes = oldelement.nodes[:]
for node0, node1 in mergingPairs:
try:
newnodes[newnodes.index(node0)] = node1
except ValueError:
pass
change.substituteElement(
oldelement,
skeletonelement.getProvisionalElement(newnodes,
oldelement.getParents()))
# Replace or eliminate the topologically changing elements.
for oldelement in topElements:
change.removeElements(oldelement)
newnodes = oldelement.nodes[:]
for node0, node1 in mergingPairs:
try:
newnodes.remove(node0)
except ValueError:
pass
if len(newnodes) >= 3:
change.insertElements(
skeletonelement.getProvisionalElement(
newnodes, parents=oldelement.getParents()))
return change
########################################################################
def getPointBoundary(self, name, exterior=None):
try:
return self.pointboundaries[name]
except KeyError:
if exterior:
bdy = ExteriorSkeletonPointBoundary(name)
else:
bdy = SkeletonPointBoundary(name)
self.pointboundaries[name] = bdy
return bdy
def getEdgeBoundary(self, name, exterior=None):
try:
return self.edgeboundaries[name] # existing bdy with this name
except KeyError: # didn't find existing bdy
if exterior:
bdy = ExteriorSkeletonEdgeBoundary(name)
else:
bdy = SkeletonEdgeBoundary(name) # create it
self.edgeboundaries[name] = bdy # save it
return bdy
# The SkeletonContext has already ensured that there is no collision.
# This routine is called from the SkelContextBoundary object's
# rename routine.
def renameBoundary(self, oldname, newname):
if oldname in self.edgeboundaries:
self.edgeboundaries[newname]=self.edgeboundaries[oldname]
del self.edgeboundaries[oldname]
self.edgeboundaries[newname].rename(newname)
elif oldname in self.pointboundaries:
self.pointboundaries[newname]=self.pointboundaries[oldname]
del self.pointboundaries[oldname]
self.pointboundaries[newname].rename(newname)
# Build a new edge boundary from the passed-in list of segments,
# and return it. The boundary should "point" from the first
# segment to the last. Startnode is required if there is only
# one segment, and is ignored in the other cases.
#
# Caller must provide a topologically trivial list of segments
# with length greater than zero, so all we have to do here is
# figure out the directions for the edges.
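    # Hedged usage sketch: given presequenced segments [s0, s1, s2],
    #   skel.makeEdgeBoundary('bottom', [s0, s1, s2])
    # orients each SkeletonEdge so the boundary runs from s0 toward
    # s2; a single-segment boundary additionally requires startnode
    # to fix the direction.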
def makeEdgeBoundary(self, name, segments=None, startnode=None,
exterior=None):
if (name in self.edgeboundaries) or \
(name in self.pointboundaries):
raise ooferror.ErrPyProgrammingError(
"Boundary '%s' already exists." % name)
bdy = self.getEdgeBoundary(name, exterior) # Guaranteed to be new.
if segments and len(segments)==1:
if startnode:
seg = segments[0]
if startnode==seg.nodes()[0]:
bdy.addEdge(SkeletonEdge(seg, 1))
else: # startnode==seg.nodes()[1]:
bdy.addEdge(SkeletonEdge(seg, -1))
else:
raise ooferror.ErrPyProgrammingError(
"Singleton segment boundaries require a starting node!")
elif segments: # Length of the segment list is greater than one.
for i in range(len(segments)-1):
seg1 = segments[i]
seg2 = segments[i+1]
nodes_and_partners = list(seg2.nodes()) + \
seg2.nodes()[0].getPartners() + \
seg2.nodes()[1].getPartners()
if seg1.nodes()[0] in nodes_and_partners: #seg2.nodes():
bdy.addEdge(SkeletonEdge(seg1, -1))
else: # seg1.nodes()[1] in nodes_and_partners: #seg2.nodes():
bdy.addEdge(SkeletonEdge(seg1, 1))
# For the final segment, need to check the one previous.
seg1 = segments[-2]
seg2 = segments[-1]
nodes_and_partners = list(seg1.nodes()) + \
seg1.nodes()[0].getPartners() + \
seg1.nodes()[1].getPartners()
if seg2.nodes()[0] in nodes_and_partners: #seg1.nodes():
bdy.addEdge(SkeletonEdge(seg2, 1))
else: # seg2.nodes()[1] in seg1.nodes():
bdy.addEdge(SkeletonEdge(seg2, -1))
return bdy # Return bdy even if it has no segments, for stack
# propagation.
def makeNonsequenceableEdgeBoundary(self, name, segments=None,
directions=None,
exterior=None):
if (name in self.edgeboundaries) or \
(name in self.pointboundaries):
raise ooferror.ErrPyProgrammingError(
"Boundary '%s' already exists." % name)
bdy = self.getEdgeBoundary(name, exterior) # Guaranteed to be new.
bdy._sequenceable=0
if segments is not None:
for i in range(len(segments)):
bdy.addEdge(SkeletonEdge(segments[i],directions[i]))
return bdy
# Build a new point boundary from the passed-in list of nodes,
# and return it.
def makePointBoundary(self, name, nodes=None, exterior=None):
if (name in self.pointboundaries) or \
(name in self.edgeboundaries):
raise ooferror.ErrPyProgrammingError(
"Boundary '%s' already exists." % name)
bdy = self.getPointBoundary(name, exterior)
# Correctly returns an empty boundary if nodes==None.
if nodes:
for n in nodes:
bdy.addNode(n)
return bdy
def removeBoundary(self, name):
try:
del self.pointboundaries[name]
except KeyError:
pass
try:
del self.edgeboundaries[name]
except KeyError:
pass
# The named boundary has been modified -- change the versions in
# the mesh to match. Don't just remove and replace, as this
# destroys valuable boundary condition info.
def pushBoundaryToMesh(self, mctxt, name):
if name in self.pointboundaries:
b = self.pointboundaries[name]
mctxt.replacePointBoundary(name, b)
elif name in self.edgeboundaries:
b = self.edgeboundaries[name]
mctxt.replaceEdgeBoundary(name, b)
def mapBoundary(self, bdy, skeleton, **kwargs):
# double dispatch wrapper for SkelContextBoundary.map().
# Copy boundary information from the given skeleton to this
# skeleton. The given skeleton might be a deputy, which
# doesn't have any boundary information, so actually copy from
# the deputy's sheriff.
# mapBoundary is a no-op in the DeputySkeleton class, so we
# don't have to worry about copying boundary data *to* a
# deputy. However, if the given source skeleton is a deputy,
# there's a chance that this skeleton is its sheriff, or
# shares the same sheriff, in which case we don't actually
# have to copy anything.
omar = skeleton.sheriffSkeleton()
if omar is not self.sheriffSkeleton():
bdy.map(omar, self, **kwargs)
## def rmBoundary(self, bdy):
## # double dispatch wrapper for SkelContextBoundary.remove()
## bdy.remove(self)
def find_geometrical_boundaries(self):
for el in self.elements:
el.exterior_edges = []
for seg in self.segment_iterator():
if seg.nElements() == 1:
seg.getElements()[0].exterior_edges.append(seg.nodes())
##############################
def quick_sanity_check(self):
# Just check for illegal elements. For a more thorough check,
# see sanity_check(), below.
sane = True
area = 0.
for element in self.elements:
area += element.area()
if element.illegal():
reporter.report("illegal element", element.index,
[n.position() for n in element.nodes])
sane = False
reporter.report("Total element area is", area)
reporter.report("Microstructure area is", self.MS.area())
if sane:
reporter.report("*** Skeleton quick sanity check passed. ***")
else:
reporter.report("*** Skeleton quick sanity check failed. ***")
return sane
def sanity_check(self):
sane = True
for element in self.elements:
if element.illegal():
reporter.report("illegal element", element.index,
[n.position() for n in element.nodes])
sane = False
for node in element.nodes:
if node not in self.nodes:
reporter.report("element", element.index, "contains a node",
node.index, "not in the skeleton")
sane = False
if element not in node.aperiodicNeighborElements():
reporter.report("inconsistent neighborNodes for node",
node.index, " and element", element.index)
sane = False
segs = element.getSegments(self)
if None in segs:
reporter.report("Element", element.index,
"is missing a segment")
sane = False
for node in self.nodes:
for element in node.aperiodicNeighborElements():
if element not in self.elements:
reporter.report("node", node.index, "contains an element",
element.index, "not in the skeleton")
sane = False
if not node.aperiodicNeighborElements():
reporter.report("Node", node.index, "at", node.position(),
"has no elements!")
sane = False
# Check that nodes on periodic boundaries have partners
x = node.position().x
y = node.position().y
xmax = self.MS.size().x
ymax = self.MS.size().y
if self.left_right_periodicity and (x == 0.0 or x == xmax):
p = node.getDirectedPartner('x')
if not p or ((x == 0.0 and p.position().x != xmax) or
(x == xmax and p.position().x != 0.0)):
reporter.report(node.__class__.__name__, node.index,
"at", node.position(),
"has no periodic partner in x")
reporter.report(" partners are at",
[(ptnr.position(), ptnr.index)
for ptnr in node.getPartners()])
sane = False
if self.top_bottom_periodicity and (y == 0.0 or y == ymax):
p = node.getDirectedPartner('y')
if not p or ((y == 0.0 and p.position().y != ymax) or
(y == ymax and p.position().y != 0.0)):
reporter.report(node.__class__.__name__, node.index,
"at", node.position(),
"has no periodic partner in y")
reporter.report(" partners are at",
[(ptnr.position(), ptnr.index)
for ptnr in node.getPartners()])
reporter.report([ptnr.position()-primitives.Point(x, ymax)
for ptnr in node.getPartners()])
sane = False
# Check self consistency of partner lists
for partner in node.getPartners():
if node not in partner.getPartners():
reporter.report("Inconsistent partner lists for",
node.__class__.__name__, node.index,
"at", node.position(), "and",
partner.__class__.__name__, partner.index,
"at", partner.position())
sane = False
for segment in self.segments.values():
elements = segment.getElements()
if len(elements) > 2:
reporter.report("segment", [n.index for n in segment.nodes()],
"has too many elements:",
[el.index for el in elements])
sane = False
for element in elements:
if element not in self.elements:
reporter.report("segment",
[n.index for n in segment.nodes()],
"contains an element", element.index,
"not in the skeleton")
sane = False
for node in segment.nodes():
if node not in self.nodes:
reporter.report("segment",
[n.index for n in segment.nodes()],
"contains a node", node.index,
"not in the skeleton")
sane = False
if sane:
reporter.report("*** Skeleton Sanity Check passed. ***")
else:
reporter.report("*** Skeleton sanity check failed. ***")
return sane
# ## ### #### ##### ###### ####### ######## ####### ###### ##### #### ### ## #
## Create a real mesh from a Skeleton, using the given element types.
    #Break up elements along interface boundaries as the mesh elements
#get created.
def femesh(self, edict, set_materials, skelpath, split_interface=True):
skelctxt = skeletoncontext.skeletonContexts[skelpath]
# edict[n] is the n-sided master element. Find the
# interpolation order of the elements. They all have the same
# order, so just pick one.
order = edict.values()[0].fun_order()
# set_materials is a function that will be called to assign
# materials to elements.
prog = progress.getProgress("New Mesh", progress.DEFINITE)
prog.setMessage("Preparing...")
self.cleanUp()
# Find which elements and edges are on the geometrical
# boundaries of the system.
self.find_geometrical_boundaries()
# The interfaces included here include both interfaces
# "induced" in the skeleton by microstructure interfaces
# (e.g. between materials, around pixel groups, etc.) *and*
# those corresponding to skeleton boundaries, which need not
# have any microstructural counterpart. The resulting
# interface_seg_dict has as keys all of those segments, and as
# values, SegmentData objects. If a segment occurs in more
# than one interface, it still only appears once in this dict,
# with a segmentData object with all the interfaces included.
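        # E.g. (hedged): a segment lying on both the 'top' skeleton
        # boundary and a material-material interface appears once
        # here, with both names recorded in its SegmentData.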
interface_seg_dict = self.createInterfaceSegmentDict(skelpath)
fe_splitnode={} #{key=skeleton node:
#value=list of mesh nodes, one for each zone
#around the skeleton node}
#If the skeleton node is not part of an interface,
#then fe_node should be used instead.
# Local dictionary of finite-element nodes, indexed by
# SkeletonNode objects.
fe_node = {}
realmesh = femesh.FEMesh(self.MS, order)
realmesh.skeleton = self
# Reserve space in FEMesh::funcnodes and FEMesh::mapnodes so
# that the vectors aren't continually reallocated.
nels = {} # number of elements of each type
for n in edict:
nels[n] = 0
for el in self.elements:
nels[el.nnodes()] += 1
#TODO OPT: Do a smarter reserve when edgements are involved?
nfuncnodes = self.nnodes() + len(self.segments)*(order-1)
for n, masterelem in edict.items():
nfuncnodes += nels[n]*masterelem.ninteriorfuncnodes()
realmesh.reserveFuncNodes(nfuncnodes)
masterel = edict[edict.keys()[0]]
n_map_per_side = masterel.nexteriormapnodes_only()/masterel.nsides()
nmapnodes = len(self.segments)*n_map_per_side
for n, masterelem in edict.items():
nmapnodes += nels[n]*masterelem.ninteriormapnodes_only()
realmesh.reserveMapNodes(nmapnodes)
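        # Hedged worked example: with quadratic (order=2) elements,
        # each segment contributes order-1 = 1 extra function node,
        # so a skeleton with 100 nodes and 250 segments reserves at
        # least 100 + 250*1 = 350 function nodes, plus whatever
        # interior nodes the master elements define.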
# Make the real nodes at the corners of the elements. These
# nodes are always both mapping and function nodes.
mnodecount = self.nnodes()
for i in xrange(mnodecount):
cur = self.nodes[i]
if split_interface:
splitcount=self.countInterfaceZonesAtNode(cur,
interface_seg_dict)
if splitcount>0:
fe_splitnode[cur]=[None]*splitcount
for j in range(splitcount):
realnode = realmesh.newFuncNode(
coord.Coord(cur.position().x, cur.position().y))
fe_splitnode[cur][j]=realnode
else:
#Do the usual
realnode = realmesh.newFuncNode(
coord.Coord(cur.position().x, cur.position().y))
fe_node[cur] = realnode
else:
realnode = realmesh.newFuncNode(
coord.Coord(cur.position().x, cur.position().y))
fe_node[cur] = realnode
if prog.stopped():
prog.setMessage("Interrupted")
return
else:
prog.setFraction(1.0*(i+1)/mnodecount)
prog.setMessage("Allocated %d/%d nodes"%(i+1, mnodecount))
# Loop over elements.
numelements = self.nelements()
realmesh.reserveElements(numelements)
for mesh_idx in xrange(numelements):
el = self.elements[mesh_idx]
local_fe_node={}
if split_interface:
for nd in el.nodes:
zonenumber=self.getInterfaceZoneNumberAtElem(
nd, el, interface_seg_dict)
if zonenumber==-1:
local_fe_node[nd]=fe_node[nd]
else:
local_fe_node[nd]=fe_splitnode[nd][zonenumber]
else: # not splitting nodes at interfaces
for nd in el.nodes:
local_fe_node[nd]=fe_node[nd]
# Index correspondence happens here -- the skeleton
# elements are assigned indices in the order that their
# corresponding real elements are created/assigned.
# (SkeletonElement.realelement sets self.meshindex when it
# creates the real element.)
mnodecount += el.realelement(skelctxt, realmesh, mesh_idx,
local_fe_node, interface_seg_dict,
edict, set_materials)
if prog.stopped():
prog.setMessage("Interrupted")
return
prog.setFraction(1.0*(mesh_idx+1)/numelements)
prog.setMessage("Allocated %d/%d elements"
% (mesh_idx+1, numelements))
# Then do boundaries.
# Note that edgeboundaries and pointboundaries are in separate lists
# in the skeleton, but in a single list in the real mesh.
# Point boundaries first.
dict_size = len(self.pointboundaries)
dict_index = 0
for bdkey, pointbndy in self.pointboundaries.items():
realbndy = realmesh.newPointBoundary(bdkey)
for node in pointbndy.nodes:
#Interface branch
## try:
## realbndy.addNode(fe_node[node]) # Preserve order of nodes.
## except KeyError:
## #Add in all the mesh nodes associated with this skeleton node.
## #Would this work with profiles?
## for realnode in fe_splitnode[node]:
## realbndy.addNode(realnode)
#Do the following to be consistent with mesh.py's newPointBoundary
skelel = node.neighborElements()[0]
realel = realmesh.getElement(skelel.meshindex)
realbndy.addNode(realel.getCornerNode(skelel.getNodeIndexIntoList(node)))
if prog.stopped():
prog.setMessage("Interrupted")
return
else:
prog.setFraction(1.0*(dict_index+1)/dict_size)
prog.setMessage("Allocating point boundaries: %d/%d"
% (dict_index+1, dict_size))
dict_index +=1
# ... then edge boundaries.
dict_size = len(self.edgeboundaries)
dict_index = 0
# Edge boundaries.
for bdkey, edgebndy in self.edgeboundaries.items():
edgebndy.sequence()
realbndy = realmesh.newEdgeBoundary(bdkey)
# Edges are directed, and the boundary is made up of
# sequenced edges, so the geometry is deterministic. When
# we split nodes, we can in principle distinguish
# left-side nodes from right-side nodes. TODO 3.1: Do so.
for skeletonedge in edgebndy.edges:
# Look up the corresponding element from the skeleton.
# When nodes are split, adjacent elements may no
# longer share the same nodes, using getElements()[1]
# instead of (if it exists) getElements()[0] would
# give different results when boundary conditions on
# an interfacial edge are set. Make sure to use the
# element to the left of the skeletonedge.
# TODO 3.1: This doesn't make a lot of sense -- if there's
# no far-side element, then the nodes aren't split, if
# the splitting mechanism uses elements. Also, even
# if they are split, then it's still the "far side"
# nodes that are the actual periphery of the system.
skelel = skeletonedge.getLeftElement()
realel = realmesh.getElement(skelel.meshindex)
edge_nodes = skeletonedge.get_nodes()
realn0 = realel.getCornerNode(
skelel.getNodeIndexIntoList(edge_nodes[0]) )
realn1 = realel.getCornerNode(
skelel.getNodeIndexIntoList(edge_nodes[1]) )
realbndy.addEdge(realel.getBndyEdge(realn0,realn1))
if prog.stopped():
prog.setMessage("Interrupted")
return
else:
prog.setFraction(1.0*(dict_index+1)/dict_size)
prog.setMessage("Allocating edge boundaries: %d/%d"
% (dict_index+1, dict_size))
dict_index +=1
if runtimeflags.surface_mode:
self.createMeshBdysFromInterfaces(skelctxt,
realmesh,interface_seg_dict)
# InterfaceElements are created here. The design intent at
# this point is that the geometry is preserved, "left" and
# "right" are correct for the interface of which this edgement
# is a part, according to the interface definition. For
# exterior edgements, one or the other of the left or right
# elements may not exist. The InterfaceElement constructor is
# responsible for handling this gracefully.
try:
el2 = edict[2]
except KeyError: # Many scripts don't supply 1D masterelements
pass
else:
self.createInterfaceElementsFromInterface(interface_seg_dict,
realmesh, el2)
prog.finish()
return realmesh
# Interfaces are fundamentally defined in the microstructure, as
# existing between different materials, or surrounding certain
# pixel groups. They are "induced" in the skeleton, because
# elements inherit the pixel group or material which makes up the
# majority of their area. The mesh gets the corresponding pixel
# or material identities, but needs to do some physics. These
# structures do that. Interfaces here include all skeleton
# boundaries, including geometrical boundaries (top, bottom, left,
# right). The passed-in seg_dict is keyed by segments, and has
# SegmentData objects as values.
def createMeshBdysFromInterfaces(self,skelctxt,realmesh,seg_dict):
interfacemsplugin=self.MS.getPlugIn("Interfaces")
# Construct a dict interface_seglist[interfacename]=[seg1,seg2,...]
# from the interface graph seg_dict.
# An interface with zero segments would not show up in this dict.
interface_seglist={}
for segkey,data in seg_dict.items():
for interfacename in data._interfacenames:
try:
seglist=interface_seglist[interfacename]
seglist.append(self.segments[segkey])
except KeyError:
interface_seglist[interfacename]=[self.segments[segkey]]
#Construct realmesh boundaries from the interface segments
for interfacename, seglist in interface_seglist.items():
try:
interfacedef=interfacemsplugin.namedinterfaces[interfacename]
except KeyError:
# Must not include skeleton boundary names
continue
# Sequence the segments. The sequenced segments may need
# to be reversed.
try:
(seg_seq, node_seq, winding_vec)=skeletonsegment.segSequence(
seglist)
if len(seg_seq)==0:
#Don't expect this to happen
raise ooferror.ErrPyProgrammingError(
"Got empty sequenced segment list!")
iels = interfacedef.getAdjacentElements(seg_seq[0],skelctxt)
if iels:
if iels.left:
if iels.left.nodesInOrder(node_seq[0],node_seq[1])==0:
seg_seq.reverse()
else:
#iels.left is allowed to be None. This occurs
#for segments lying on the exterior boundary
#and with a 'direction' such that the exterior
#space is to its 'left'.
#
# exterior space,
# to the 'left' of the arrow
# ----------->-----------
# | |
# | microstructure |
# | |
#
if iels.right.nodesInOrder(node_seq[0],node_seq[1]):
seg_seq.reverse()
else:
raise ooferror.ErrPyProgrammingError(
"Expecting this segment to be part of the interface!")
except skeletonsegment.SequenceError:
#Non-sequenceable
seg_seq=seglist
#Create a new realmesh boundary for the interface.
#Note that this mesh boundary does not originate from
#a skeleton boundary!
realbndy = realmesh.newEdgeBoundary(interfacename)
#Add edges to the realmesh boundary
for seg in seg_seq:
iels = interfacedef.getAdjacentElements(seg,skelctxt)
if iels:
if iels.left:
skelel = iels.left
#Make sure to have the nodes in the right order
#for the realmesh boundary edge.
if skelel.nodesInOrder(*seg.get_nodes()):
sn0=seg.get_nodes()[0]
sn1=seg.get_nodes()[1]
else:
sn0=seg.get_nodes()[1]
sn1=seg.get_nodes()[0]
else:
skelel = seg.getElements()[0] # Right-side?
if skelel.nodesInOrder(*seg.get_nodes()):
sn0=seg.get_nodes()[1]
sn1=seg.get_nodes()[0]
else:
sn0=seg.get_nodes()[0]
sn1=seg.get_nodes()[1]
else:
raise ooferror.ErrPyProgrammingError(
"Expecting this segment to be part of the interface!")
realel = realmesh.getElement(skelel.meshindex)
realn0 = realel.getCornerNode(skelel.getNodeIndexIntoList(sn0))
realn1 = realel.getCornerNode(skelel.getNodeIndexIntoList(sn1))
realbndy.addEdge(realel.getBndyEdge(realn0,realn1))
def createInterfaceElementsFromInterface(self,seg_dict,realmesh,edgemaster):
# seg_dict is the dictionary of segments in this interface.
# realmesh is the mesh in which the InterfaceElements will live.
# edgemaster is the 1D master element.
for segkey,data in seg_dict.items():
seg=self.segments[segkey]
els=seg.getElements()
#TODO MER: Interface may not have an interface material
# Left and right are set, but either can be None.
if data._materialname:
cmat=materialmanager.materialmanager[data._materialname].actual
else:
cmat=None
leftelem=data._leftskelel
rightelem=data._rightskelel
# Figure out the "boundary-first" and "boundary-last"
# nodes -- these are needed later to compute the left-side
# and right-side normals.
# boundary_first_node and trailing_node can in principle be
# undefined, if there was neither a right nor left
# element. We'll take that chance.
# Skeleton nodes cannot be split, so we are done for now.
leftnodes=None
rightnodes=None
# The inorder booleans indicate whether the respective
# node lists are in interface-order or not, i.e. if the
# first node in the list is the first one you encounter as
# you traverse the interface with the left-side elements
# on the left and the right-side elements on the right.
# TODO OPT: It may be cleaner to just arrange for the node
# lists to always be in order. This is not done now
# because there may be subtle order-dependencies
# downstream.
leftnodes_inorder = None
rightnodes_inorder = None
if leftelem:
realel = realmesh.getElement(leftelem.meshindex)
irealn0 = leftelem.getNodeIndexIntoList(segkey[0])
realn0 = realel.getCornerNode( irealn0 )
irealn1 = leftelem.getNodeIndexIntoList(segkey[1])
realn1 = realel.getCornerNode( irealn1 )
#Find the nodes on the edge between realn0 and realn1.
#See skeletonelement.py to verify how real nodes
#are added to the list and passed to the element.
ncorners = realel.ncorners()
found=False
if (irealn0+1)%ncorners==irealn1:
leftnodes=[realn0]
for rn in realel.node_iterator():
if found:
if rn==realn1:
break
leftnodes.append(rn)
if rn==realn0:
found=True
leftnodes.append(realn1)
leftnodes_inorder = True
else:
leftnodes=[realn1]
for rn in realel.node_iterator():
if found:
if rn==realn0:
break
leftnodes.append(rn)
if rn==realn1:
found=True
leftnodes.append(realn0)
leftnodes.reverse()
leftnodes_inorder = False
if rightelem:
realel = realmesh.getElement(rightelem.meshindex)
irealn0 = rightelem.getNodeIndexIntoList(segkey[0])
realn0 = realel.getCornerNode( irealn0 )
irealn1 = rightelem.getNodeIndexIntoList(segkey[1])
realn1 = realel.getCornerNode( irealn1 )
if rightelem.nodesInOrder(*segkey):
right_first_node = realn1
right_last_node = realn0
else:
right_first_node = realn0
right_last_node = realn1
#Find the nodes on the edge between realn0 and realn1.
#See skeletonelement.py to verify how real nodes
#are added to the list and passed to the element.
ncorners = realel.ncorners()
found=False
if (irealn0+1)%ncorners==irealn1:
rightnodes=[realn0]
for rn in realel.node_iterator():
if found:
if rn==realn1:
break
rightnodes.append(rn)
if rn==realn0:
found=True
rightnodes.append(realn1)
rightnodes_inorder = False
else:
rightnodes=[realn1]
for rn in realel.node_iterator():
if found:
if rn==realn0:
break
rightnodes.append(rn)
if rn==realn1:
found=True
rightnodes.append(realn0)
rightnodes.reverse()
rightnodes_inorder = True
# TODO MER: Double-check what this is about. Historically,
# the code did this to ensure that both node lists were
# valid, by just passing redundant info in the case where
# one of them is None. This does not break the geometry,
# so leave it in for now.
# NB leftnodes and rightnodes can't *both* be None, that
# only happens if the there are no elements on either side
# of the segment, and in that case, we don't get called at
# all. TODO MER: Is this in fact true? What about empty
# elements?
if not leftnodes:
leftnodes = rightnodes
leftnodes_inorder = rightnodes_inorder
else:
if not rightnodes:
rightnodes = leftnodes
rightnodes_inorder = leftnodes_inorder
if leftelem:
#segmentordernumber is the index of the segment
#in the list returned by skelel.getSegments().
#It is passed to the edgement, so that it can
#be used to retrieve the segment given the first
#side1elem (or side2elem, if side1elem is None).
segmentordernumber=leftelem.getSegmentOrderNumber(seg,self)
else:
# TODO MER: Check if this preserves the geometry.
segmentordernumber=rightelem.getSegmentOrderNumber(seg,self)
#Pass the realnodes on the edge of the element on 1 side
#and the realnodes on the edge on the other side of the
#interface segment. The list of nodes may be identical.
#side1elem or side2elem (but not both) may be None.
# The below is no longer true. The first argument is
# always the left side, even if there is no left side.
#If the interface segment is at an exterior boundary,
#side1elem is automatically the first element returned by
#seg.getElements(), unless the element does not have a material
#or pixel group.
edgement=edgemaster.buildInterfaceElement(leftelem,
rightelem,
segmentordernumber,
cmat,
leftnodes,rightnodes,
leftnodes_inorder,
rightnodes_inorder,
data._interfacenames)
realmesh.addInterfaceElement(edgement)
def getInterfaceZoneNumberAtElem(self,skelnode,skelelem,seg_dict):
# Returns -1 if there are no interfaces at the node.
interfacesegments=self.getInterfaceSegmentsAtNode(skelnode,seg_dict)
if len(interfacesegments)==0:
return -1
# Add in the exterior boundary segments incident on skelnode
# if skelnode is at an exterior boundary.
exteriorsegments = skelnode.exteriorSegments(self)
isExteriorNode = len(exteriorsegments) > 0
for seg in exteriorsegments:
interfacesegments.add(seg)
# nodex=skelnode.position().x
# nodey=skelnode.position().y
# isExteriorNode=False
# if (nodex==0 or nodex==self.size().x or \
# nodey==0 or nodey==self.size().y):
# isExteriorNode=True
# localsegments=skelnode.localSegments(self)
# for seg in localsegments:
# if seg.nElements()==1 and (seg not in interfacesegments):
# interfacesegments.append(seg)
#Start at a segment that lies at an exterior boundary, if it exists
if isExteriorNode:
startsegment = exteriorsegments.pop()
interfacesegments.remove(startsegment)
else:
startsegment = interfacesegments.pop()
# startsegment=interfacesegments[0]
# for seg in interfacesegments:
# if seg.nElements()==1:
# startsegment=seg
# break
# interfacesegments.remove(startsegment)
        #CW or CCW depends on the first element in getElements()
startelement=startsegment.getElements()[0]
if skelelem==startelement:
return 0
numzones=1
while len(interfacesegments)>0:
#Now starting at startsegment, get the 'fan' of adjacent elements
#until we encounter the next interface segment.
while 1:
if skelelem==startelement:
return numzones-1
nextsegment=startelement.getOppositeSegment(skelnode,
startsegment,
self)
#TODO MER: Assign result directly to startsegment?
startsegment=nextsegment
nextelement=startsegment.getElements()[0]
if nextelement==startelement:
#if getElements() has only one item, then
#startsegment is in interfacesegments
#(startsegment is on an exterior boundary)
#and the loops will terminate.
nextelement=startsegment.getElements()[-1]
startelement=nextelement
#Removing items from interfacesegments might not be
#necessary (e.g. use a counter instead).
if startsegment in interfacesegments:
interfacesegments.remove(startsegment)
numzones+=1
break
if isExteriorNode:
raise ooferror.ErrPyProgrammingError("This shouldn't happen!")
return numzones-1
def countInterfaceZonesAtNode(self,skelnode,seg_dict):
        # The number of interface zones at a node is the number of
# interface segments that hit the node, if the node isn't an
# external node. If the node is external, it's the number of
# non-external interface segments plus one.
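        # Hedged examples: an interior node where three interface
        # segments meet has 3 zones; an exterior node with one
        # non-exterior interface segment has 1 + 1 = 2 zones, the
        # interface splitting the boundary fan in two.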
interfacesegments = self.getInterfaceSegmentsAtNode(skelnode, seg_dict)
if len(interfacesegments)==0:
return 0
exteriorsegments = skelnode.exteriorSegments(self)
for seg in exteriorsegments:
try:
interfacesegments.remove(seg)
except KeyError:
pass
numzones = len(interfacesegments)
if exteriorsegments:
numzones += 1
return numzones
def getInterfaceSegmentsAtNode(self,skelnode,seg_dict):
neighbornodes = skelnode.neighborNodes(self)
localsegments = set()
for nd in neighbornodes:
segkey = skeletonnode.canonical_order(nd,skelnode)
if segkey in seg_dict:
localsegments.add(self.segments[segkey])
return localsegments
# This function is used when creating a skeleton boundary from an
# interface. This function is also called by
# DirectorInterfacesWidget (boundarybuilderGUI.py)
def getInterfaceSegments(self,skelctxt,interfacename):
interfacemsplugin=self.MS.getPlugIn("Interfaces")
seglist=[]
directionlist=[]
try:
interfacedef=interfacemsplugin.namedinterfaces[interfacename]
for key,seg in self.segments.items():
iels = interfacedef.getAdjacentElements(seg,skelctxt)
if iels:
seglist.append(seg)
if iels.left:
if iels.left.nodesInOrder(*seg.get_nodes()):
directionlist.append(1)
else:
directionlist.append(-1)
else:
# iels.left is allowed to be None. This occurs
# for segments lying on the exterior boundary
# and with a 'direction' such that the
# exterior space is to its 'left'.
#
# exterior space,
# to the 'left' of the arrow
# ----------->-----------
# | |
# | microstructure |
# | |
#
# If iels.left is None but iels evaluates to True,
# then iels.right is guaranteed not to be None.
if iels.right.nodesInOrder(*seg.get_nodes()):
directionlist.append(-1)
else:
directionlist.append(1)
except KeyError:
pass
return seglist, directionlist
# getAdjacentElements now returns left and right data.
# SegmentData lists should use it.
def createInterfaceSegmentDict(self,skelpath):
seg_dict = {}
skelctxt = skeletoncontext.skeletonContexts[skelpath]
if runtimeflags.surface_mode:
interfacemsplugin=self.MS.getPlugIn("Interfaces")
# For each interface, for each segment of the skeleton,
# check if the segment is a member of the interface (by
# asking the interface itself, via the plug-in), and
# retrieve the left-side and right-side elements.
for interfacename, interfacedef in interfacemsplugin.namedinterfaces.items():
for key,seg in self.segments.items():
iels = interfacedef.getAdjacentElements(seg,skelctxt)
if iels:
matname = interfacemsplugin.getInterfaceMaterialName(
interfacename)
try:
segmentdata=seg_dict[key]
# previous material and skeleton element get
# overwritten, but interfacename is added to
# the list.
segmentdata.setData(matname,
iels.left,
iels.right,
interfacename)
except KeyError:
seg_dict[key]=SegmentData(matname,
iels.left,
iels.right,
interfacename)
# End of surface_mode conditional.
#Generate segments along skeleton boundaries
for bdkey, edgebndy in self.edgeboundaries.items():
matname=skelctxt.getBoundary(bdkey)._interfacematerial
self._createInterfaceSegmentDictFromSkelBoundary(bdkey, edgebndy,
matname,
seg_dict)
return seg_dict
# _createInterfaceSegmentDictFromSkelBoundary creates the
# SegmentData objects needed to create interfaces from the
# skeleton boundaries. This info is added to the given seg_dict.
def _createInterfaceSegmentDictFromSkelBoundary(self,bdkey,skelbdy,
matname, seg_dict):
skelbdy.sequence() #Should have been sequenced by this point
bdylength=len(skelbdy.edges)
for i in xrange(0,bdylength):
skeledge=skelbdy.edges[i]
# NOTE: skeledge.get_nodes() returns the nodes in the
# order indicated by skeledge.direction.
# skeledge.segment.get_nodes() returns the nodes already
# in canonical order (see skeletonsegment.py)
segkey=skeledge.segment.get_nodes()
els=skeledge.segment.getElements()
if len(els) == 1:
#seg_dict[segkey]=(matname,els[0],bdkey)
if els[0].nodesInOrder(*skeledge.get_nodes()):
#seg_dict[segkey]=(matname,els[0],bdkey)
leftelem=els[0]
rightelem=None
else:
#seg_dict[segkey]=(matname,els[1],bdkey)
leftelem=None
rightelem=els[0]
else:
assert len(els) == 2
# Nodes in an element are ordered in a CCW fashion.
# If nodes in a directed segment have the same order
# as in the element, then that element is to the left
# of the directed segment.
#
# s ----<------
# ----<------
# | |
# \|/ e /|\
# | |
# ---->------
#
# (element e is to the 'left' of directed segment s)
#
if els[0].nodesInOrder(*skeledge.get_nodes()):
#seg_dict[segkey]=(matname,els[0],bdkey)
leftelem=els[0]
rightelem=els[1]
else:
#seg_dict[segkey]=(matname,els[1],bdkey)
leftelem=els[1]
rightelem=els[0]
try:
segmentdata=seg_dict[segkey]
# previous material and skeleton element get overwritten
segmentdata.setData(matname,leftelem,rightelem,bdkey)
except KeyError:
seg_dict[segkey]=SegmentData(matname,leftelem,rightelem,bdkey)
############################################################################
####################### femesh_shares, for parallel #####################
# This version of the method indicates the funcnodes that are
# shared between processes. This sharing information is assumed
# to have been created by Haan's code. The sharing information
# should reach the dofs, fields and equations later.
def femesh_shares(self, edict, set_materials):
# edict[n] is the n-sided master element. Find the
# interpolation order of the elements. They all have the same
# order, so just pick one.
#order = edict[edict.keys()[0]].fun_order()
#This should do the same thing
order = edict.values()[0].fun_order()
# set_materials is a function that will be called to assign
# materials to elements.
prog = progress.getProgress("parallel femesh", progress.DEFINITE)
self.cleanUp()
# Local dictionary of finite-element nodes, indexed by
# SkeletonNode objects.
fe_node = {}
# Find which elements and edges are on the geometrical
# boundaries of the system.
self.find_geometrical_boundaries()
realmesh = femesh.FEMesh(self.MS, order)
# Reserve space in FEMesh::funcnodes and FEMesh::mapnodes so
# that the vectors aren't continually reallocated.
nels = {} # number of elements of each type
for n in edict:
nels[n] = 0
for el in self.elements:
nels[el.nnodes()] += 1
nfuncnodes = self.nnodes() + len(self.segments)*(order-1)
for n, masterelem in edict.items():
nfuncnodes += nels[n]*masterelem.ninteriorfuncnodes()
realmesh.reserveFuncNodes(nfuncnodes)
masterel = edict[edict.keys()[0]]
n_map_per_side = masterel.nexteriormapnodes_only()/masterel.nsides()
nmapnodes = len(self.segments)*n_map_per_side
for n, masterelem in edict.items():
nmapnodes += nels[n]*masterelem.ninteriormapnodes_only()
realmesh.reserveMapNodes(nmapnodes)
# Make the real nodes at the corners of the elements. These
# nodes are always both mapping and function nodes. A
# "meshindex" attribute is written into the skeleton node.
mnodecount = self.nnodes()
for i in range(self.nnodes()):
cur = self.nodes[i]
#Have to include the local skeleton index and the
#"remote" indices, as well as the processes that share that node.
realnode = realmesh.newFuncNode_shares(
coord.Coord(cur.position().x, cur.position().y),
cur.sharedWith(),
[cur.remoteIndex(procnum) for procnum in cur.sharedWith()],
cur.index)
fe_node[cur] = realnode
#cur.setMeshIndex(realnode.index())
if prog.stopped():
return
prog.setFraction(1.0*(i+1)/mnodecount)
prog.setMessage("Allocated %d/%d nodes"%(i+1, mnodecount))
# Loop over elements.
numelements = self.nelements()
realmesh.reserveElements(numelements)
for mesh_idx in range(self.nelements()):
el = self.elements[mesh_idx]
# Index correspondence happens here -- the skeleton
# elements are assigned indices in the order that their
# corresponding real elements are created/assigned.
# (SkeletonElement.realelement sets self.meshindex when it
# creates the real element.)
mnodecount += el.realelement_shares(
self, realmesh, mesh_idx, fe_node, mnodecount,
edict, set_materials)
if prog.stopped():
return
prog.setFraction(1.0*(mesh_idx+1)/numelements)
prog.setMessage("Allocated %d/%d elements"
% (mesh_idx+1, numelements))
# Then do boundaries.
# Note that edgeboundaries and pointboundaries are in separate lists
# in the skeleton, but in a single list in the real mesh.
# Point boundaries first.
dict_size = len(self.pointboundaries)
dict_index = 0
for bdkey, pointbndy in self.pointboundaries.items():
realbndy = realmesh.newPointBoundary(bdkey)
for node in pointbndy.nodes:
realbndy.addNode(fe_node[node]) # Preserve order of nodes.
if prog.stopped():
return
prog.setFraction(1.0*(dict_index+1)/dict_size)
prog.setMessage("Allocated %d/%d point boundaries"
% (dict_index+1, dict_size))
dict_index +=1
# ... then edge boundaries.
dict_size = len(self.edgeboundaries)
dict_index = 0
for bdkey, edgebndy in self.edgeboundaries.items():
edgebndy.sequence()
realbndy = realmesh.newEdgeBoundary(bdkey)
for skeletonedge in edgebndy.edges:
# Look up the corresponding element from the skeleton.
skelel = skeletonedge.segment.getElements()[0]
realel = realmesh.getElement(skelel.meshindex)
edge_nodes = skeletonedge.get_nodes()
realn0 = fe_node[edge_nodes[0]] # First real node.
realn1 = fe_node[edge_nodes[1]] # Second real node.
realbndy.addEdge(realel.getBndyEdge(realn0,realn1))
if prog.stopped():
return
prog.setFraction(1.0*(dict_index+1)/dict_size)
prog.setMessage("Allocated %d/%d edge boundaries"
% (dict_index+1, dict_size))
dict_index +=1
return realmesh
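    # Hypothetical call sketch (names assumed): with `edict` mapping a node
    # count to its master element, a parallel driver would build the real
    # mesh via
    #     realmesh = skel.femesh_shares(edict, set_materials)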
########################## end femesh_shares ###############################
## end of class Skeleton
########################################################################
def newEmptySkeleton(name, msname, left_right_periodicity=False,
top_bottom_periodicity=False):
mscontext = microstructure.microStructures[msname]
ms = mscontext.getObject()
skel = Skeleton(ms, left_right_periodicity, top_bottom_periodicity)
skeletoncontext.skeletonContexts.add([msname, name], skel, parent=mscontext)
return skel
# skeleton_geometry is an object of type SkeletonGeometry, class defined above.
def initialSkeleton(name, ms, nx, ny, skeleton_geometry):
skel = skeleton_geometry(nx, ny, ms)
if skel is not None:
mscontext = microstructure.microStructures[ms.name()]
skeletoncontext.skeletonContexts.add([ms.name(), name],
skel, parent=mscontext)
return skel
# Parallel initial skeleton
if parallel_enable.enabled():
def initialSkeletonParallel(name, ms, nx, ny, skeleton_geometry):
from ooflib.engine.IO import skeletonIPC
skeletonIPC.smenu.Initialize(name=name, microstructure=ms,
x_elements=nx, y_elements=ny,
skeleton_geometry=skeleton_geometry)
# Create pixel-to-element skeleton. Thus, homogeneity of all elements will
# be set to "1".
def simpleSkeleton(name, ms, nx, ny, skeleton_geometry):
skel = skeleton_geometry(nx, ny, ms, preset_homog=True)
mscontext = microstructure.microStructures[ms.name()]
skeletoncontext.skeletonContexts.add([ms.name(), name], skel,
parent=mscontext)
return skel
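# Hypothetical construction sketch (`geometry` is assumed to be a
# SkeletonGeometry instance, as described above initialSkeleton):
#     skel = initialSkeleton('skeleton', ms, 10, 10, geometry)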
###########################
## TODO 3.1: Remove the 'skeleton' argument in all ProvisionalChanges
## methods, because self.skeleton can now be used instead. It's
## probably necessary to give DeputyProvisionalChanges a self.skeleton
## as well.
class ProvisionalChanges:
def __init__(self, skeleton):
self.skeleton = skeleton # Skeleton object. Not context.
self.removed = [] # elements to be removed
self.inserted = [] # provisional elements added
self.substitutions = [] # pairs (old, new) of el. substitutions
self.seg_subs = {} # {old:[new segs] ...}
self.movednodes = [] # list of MoveNode objects
        self.cachedDeltaE = None # Energy difference
        self.cachedDeltaEBound = None # Upper bound on the energy difference
self.before = None # Elements before the change
self.after = None # Elements after the change
def removeAddedNodes(self, skeleton):
# redefined by subclasses that add nodes
pass
class MoveNode: # nested class definition
def __init__(self, node=None, position=None, mobility=None):
self.node = node
self.position = position
self.mobility = mobility
def removeElements(self, *elements):
for element in elements:
self.removed.append(element)
def insertElements(self, *elements):
for element in elements:
self.inserted.append(element)
def substituteElement(self, old, new):
# Old and new elements must have the same number of nodes, and
# corresponding nodes must be in the same positions in the
# element's node lists, so that the correct parent-child
# relationships may be made for the corresponding segments.
self.substitutions.append([old, new])
def substituteSegment(self, old, new): # "new" has to be a list
self.seg_subs[old] = new
def moveNode(self, node, position, mobility=(1,1)):
self.movednodes.append(
self.MoveNode(node=node, position=position, mobility=mobility))
def nRemoved(self):
return len(self.removed)
def elBefore(self):
if self.before is None:
self.before = self.removed + [o for o,n in self.substitutions]
for mvnode in self.movednodes:
for nbr in mvnode.node.neighborElements():
if nbr not in self.before:
self.before.append(nbr)
return self.before
def elAfter(self):
if self.after is None:
self.after = self.inserted + [n for o,n in self.substitutions]
for mvnode in self.movednodes:
for nbr in mvnode.node.neighborElements():
if (nbr not in self.removed) and (nbr not in self.after):
self.after.append(nbr)
return self.after
def makeNodeMove(self, skeleton):
for mvnode in self.movednodes:
mvnode.node.moveTo(mvnode.position)
def moveNodeBack(self, skeleton):
for mvnode in self.movednodes:
mvnode.node.moveBack()
def illegal(self, skeleton):
# Will this change produce any illegal elements?
self.makeNodeMove(skeleton) # Move nodes to simulate the change
try:
# Check elements
for element in self.elAfter():
if element.illegal():
return True
finally:
self.moveNodeBack(skeleton) # Move nodes back
return False
def deltaE(self, skeleton, alpha):
# Return the change in energy per element if this move were to
# be accepted.
if self.cachedDeltaE is None:
# Energy before the change
oldE = 0.0
for element in self.elBefore():
oldE += element.energyTotal(skeleton, alpha)
oldE /= len(self.elBefore())
            # Move nodes to simulate the change
self.makeNodeMove(skeleton)
# Energy after the change
newE = 0.0
for element in self.elAfter():
# TODO OPT: perhaps using cachedHomogeneities as in
# the deputy would be helpful here too
newE += element.energyTotal(skeleton, alpha)
newE /= len(self.elAfter())
# Move node back
self.moveNodeBack(skeleton)
            # Energy difference due to the change
self.cachedDeltaE = newE - oldE
return self.cachedDeltaE
def deltaEBound(self, skeleton, alpha):
        # Return the maximum possible deltaE -- assuming all elements
        # become homogeneous after the change
if self.cachedDeltaEBound is None:
# Energy before the change
oldE = 0.0
for element in self.elBefore():
oldE += element.energyTotal(self.skeleton, alpha)
oldE /= len(self.elBefore())
            # Move nodes to simulate the change
self.makeNodeMove(self.skeleton)
# Energy after the change
newE = 0.0
for element in self.elAfter():
newE += (1.-alpha)*element.energyShape()+alpha
newE /= len(self.elAfter())
# Move node back
self.moveNodeBack(self.skeleton)
            # Energy difference due to the change
self.cachedDeltaEBound = newE - oldE
return self.cachedDeltaEBound
def accept(self, skeleton):
# Create actual elements to replace the provisional ones. The
# actual elements replace their predecessors in the
# ProvisionalChanges object, so that they're available to the
# calling routine.
## TODO OPT: Remove argument and use self.skeleton instead?
self.inserted = [element.accept(skeleton) for element in self.inserted]
for mvnode in self.movednodes:
mvnode.node.moveTo(mvnode.position)
if mvnode.mobility:
mvnode.node.setMobilityX(mvnode.mobility[0])
mvnode.node.setMobilityY(mvnode.mobility[1])
for pair in self.substitutions:
old, new = pair
newelement = new.accept(skeleton)
pair[1] = newelement
oldsegments = old.getSegments(skeleton)
newsegments = newelement.getSegments(skeleton)
for oldseg, newseg in zip(oldsegments, newsegments):
for parent in oldseg.getParents():
newseg.add_parent(parent)
parent.add_child(newseg)
# Call Skeleton.removeElements only *after* the segment
# parents have been reestablished, because removing the
# elements may remove the segments from the skeleton.
skeleton.removeElements(old)
for old in self.seg_subs:
new_segs = self.seg_subs[old]
for new in new_segs:
for parent in old.getParents():
new.add_parent(parent)
parent.add_child(new)
skeleton.removeElements(*self.removed)
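# Caller-side lifecycle sketch (hypothetical names): a modification routine
# typically builds a ProvisionalChanges object, vets it, then commits it:
#     change = ProvisionalChanges(skel)
#     change.removeElements(old_element)
#     change.insertElements(new_element)
#     if not change.illegal(skel) and change.deltaE(skel, alpha) < 0.0:
#         change.accept(skel)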
class ProvisionalInsertion(ProvisionalChanges):
def __init__(self, skeleton):
ProvisionalChanges.__init__(self, skeleton)
self.addedNodes = []
def addNode(self, node):
self.addedNodes.append(node)
def removeAddedNodes(self, skeleton):
## TODO OPT: Remove argument and use self.skeleton instead?
for n in self.addedNodes:
n.destroy(skeleton)
class ProvisionalMerge(ProvisionalChanges):
def __init__(self, skeleton, node0, node1):
ProvisionalChanges.__init__(self, skeleton)
self.node0 = node0
self.node1 = node1
def accept(self, skeleton):
## TODO OPT: Remove argument and use self.skeleton instead?
self.node0.makeSibling(self.node1)
ProvisionalChanges.accept(self, skeleton)
class ProvisionalMerges(ProvisionalChanges):
def __init__(self, skeleton, *pairs):
ProvisionalChanges.__init__(self, skeleton)
self.pairs = pairs
def accept(self, skeleton):
for pair in self.pairs:
pair[0].makeSibling(pair[1])
ProvisionalChanges.accept(self, skeleton)
| [
"[email protected]"
] | |
c18094385c97b85f0c38cca69b68abf5c9662bf7 | 573d470c9fcb3799e8822e6953e1259b74e0672c | /Course/modules/example_13.py | 4016b39419fd2a8cbe5a54f901788f7a01d1fdc1 | [
"Apache-2.0"
] | permissive | zevgenia/Python_shultais | e6f35773e54a72477ea5ee83520dbecfbee7ff48 | e51c31de221c5e7f36ede857a960138009ec8a05 | refs/heads/master | 2020-03-31T21:46:25.061571 | 2018-10-11T13:43:47 | 2018-10-11T13:43:47 | 152,593,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 70 | py | # -*- coding: utf-8 -*-
import physics5
print(physics5.ek(100, 10))
| [
"[email protected]"
] | |
3c314f559c4a02cdd9399e88687faf1aaef55518 | 062fa6891dfe2278bcfa36a00cc8bed4356e9f5b | /mlflow/tracking/fluent.py | 3acff190107205ff8b9891e6429614e2e541b88d | [
"Apache-2.0"
] | permissive | sepidehhosseinzadeh/mlflow-cpp | f43ffb1dba0e57b9b67fad696966bae683328527 | 724eeaeafbee829201859033315a9d2ebf314844 | refs/heads/master | 2022-12-12T13:41:28.825923 | 2020-06-10T20:42:55 | 2020-06-10T20:42:55 | 158,026,349 | 2 | 0 | Apache-2.0 | 2022-12-08T05:37:42 | 2018-11-17T21:27:07 | Makefile | UTF-8 | Python | false | false | 11,447 | py | """
Internal module implementing the fluent API, allowing management of an active
MLflow run. This module is exposed to users at the top-level :py:mod:`mlflow` module.
"""
from __future__ import print_function
import numbers
import os
import atexit
import sys
import time
from mlflow.entities import Experiment, Run, SourceType, RunInfo
from mlflow.exceptions import MlflowException
from mlflow.tracking.client import MlflowClient
from mlflow.utils import env
from mlflow.utils.databricks_utils import is_in_databricks_notebook, get_notebook_id, \
get_notebook_path, get_webapp_url
from mlflow.utils.logging_utils import eprint
from mlflow.utils.mlflow_tags import MLFLOW_DATABRICKS_WEBAPP_URL, \
MLFLOW_DATABRICKS_NOTEBOOK_PATH, \
MLFLOW_DATABRICKS_NOTEBOOK_ID
from mlflow.utils.validation import _validate_run_id
_EXPERIMENT_ID_ENV_VAR = "MLFLOW_EXPERIMENT_ID"
_RUN_ID_ENV_VAR = "MLFLOW_RUN_ID"
_active_run_stack = []
_active_experiment_id = None
def set_experiment(experiment_name):
"""
    Set the given experiment as the active experiment. If the experiment does
    not exist, create an experiment with the provided name.
:param experiment_name: Name of experiment to be activated.
"""
client = MlflowClient()
experiment = client.get_experiment_by_name(experiment_name)
exp_id = experiment.experiment_id if experiment else None
if not exp_id:
print("INFO: '{}' does not exist. Creating a new experiment".format(experiment_name))
exp_id = client.create_experiment(experiment_name)
global _active_experiment_id
_active_experiment_id = exp_id
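# Usage sketch (assumes the usual `import mlflow` at the call site):
#     mlflow.set_experiment("my-experiment")  # created on first use if absent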
class ActiveRun(Run): # pylint: disable=W0223
"""Wrapper around :py:class:`mlflow.entities.Run` to enable using Python ``with`` syntax."""
def __init__(self, run):
Run.__init__(self, run.info, run.data)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
status = "FINISHED" if exc_type is None else "FAILED"
end_run(status)
return exc_type is None
def start_run(run_uuid=None, experiment_id=None, source_name=None, source_version=None,
entry_point_name=None, source_type=None, run_name=None, nested=False):
"""
Start a new MLflow run, setting it as the active run under which metrics and parameters
will be logged. The return value can be used as a context manager within a ``with`` block;
otherwise, you must call ``end_run()`` to terminate the current run.
If you pass a ``run_uuid`` or the ``MLFLOW_RUN_ID`` environment variable is set,
``start_run`` attempts to resume a run with the specified run ID and
other parameters are ignored. ``run_uuid`` takes precedence over ``MLFLOW_RUN_ID``.
:param run_uuid: If specified, get the run with the specified UUID and log parameters
and metrics under that run. The run's end time is unset and its status
is set to running, but the run's other attributes (``source_version``,
``source_type``, etc.) are not changed.
:param experiment_id: ID of the experiment under which to create the current run (applicable
only when ``run_uuid`` is not specified). If ``experiment_id`` argument
is unspecified, will look for valid experiment in the following order:
activated using ``set_experiment``, ``MLFLOW_EXPERIMENT_ID`` env variable,
or the default experiment.
:param source_name: Name of the source file or URI of the project to be associated with the run.
                        If none is provided, defaults to the current file.
:param source_version: Optional Git commit hash to associate with the run.
:param entry_point_name: Optional name of the entry point for the current run.
:param source_type: Integer :py:class:`mlflow.entities.SourceType` describing the type
of the run ("local", "project", etc.). Defaults to
:py:class:`mlflow.entities.SourceType.LOCAL` ("local").
:param run_name: Name of new run. Used only when ``run_uuid`` is unspecified.
:param nested: Parameter which must be set to ``True`` to create nested runs.
:return: :py:class:`mlflow.ActiveRun` object that acts as a context manager wrapping
the run's state.
"""
global _active_run_stack
if len(_active_run_stack) > 0 and not nested:
raise Exception(("Run with UUID {} is already active. To start a nested " +
"run call start_run with nested=True").format(
_active_run_stack[0].info.run_uuid))
existing_run_uuid = run_uuid or os.environ.get(_RUN_ID_ENV_VAR, None)
if existing_run_uuid:
_validate_run_id(existing_run_uuid)
active_run_obj = MlflowClient().get_run(existing_run_uuid)
if active_run_obj.info.lifecycle_stage == RunInfo.DELETED_LIFECYCLE:
raise MlflowException("Cannot start run with ID {} because it is in the "
"deleted state.".format(existing_run_uuid))
else:
if len(_active_run_stack) > 0:
parent_run_id = _active_run_stack[-1].info.run_uuid
else:
parent_run_id = None
exp_id_for_run = experiment_id or _get_experiment_id()
if is_in_databricks_notebook():
databricks_tags = {}
notebook_id = get_notebook_id()
notebook_path = get_notebook_path()
webapp_url = get_webapp_url()
if notebook_id is not None:
databricks_tags[MLFLOW_DATABRICKS_NOTEBOOK_ID] = notebook_id
if notebook_path is not None:
databricks_tags[MLFLOW_DATABRICKS_NOTEBOOK_PATH] = notebook_path
if webapp_url is not None:
databricks_tags[MLFLOW_DATABRICKS_WEBAPP_URL] = webapp_url
active_run_obj = MlflowClient().create_run(
experiment_id=exp_id_for_run,
run_name=run_name,
source_name=notebook_path,
source_version=source_version or _get_source_version(),
entry_point_name=entry_point_name,
source_type=SourceType.NOTEBOOK,
tags=databricks_tags,
parent_run_id=parent_run_id)
else:
active_run_obj = MlflowClient().create_run(
experiment_id=exp_id_for_run,
run_name=run_name,
source_name=source_name or _get_source_name(),
source_version=source_version or _get_source_version(),
entry_point_name=entry_point_name,
source_type=source_type or _get_source_type(),
parent_run_id=parent_run_id)
_active_run_stack.append(ActiveRun(active_run_obj))
return _active_run_stack[-1]
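# Fluent-usage sketch (assumes `import mlflow`):
#     with mlflow.start_run():
#         mlflow.log_param("alpha", 0.5)
#         mlflow.log_metric("rmse", 0.27)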
def end_run(status="FINISHED"):
"""End an active MLflow run (if there is one)."""
global _active_run_stack
if len(_active_run_stack) > 0:
MlflowClient().set_terminated(_active_run_stack[-1].info.run_uuid, status)
# Clear out the global existing run environment variable as well.
env.unset_variable(_RUN_ID_ENV_VAR)
_active_run_stack.pop()
atexit.register(end_run)
def active_run():
"""Get the currently active ``Run``, or None if no such run exists."""
return _active_run_stack[-1] if len(_active_run_stack) > 0 else None
def log_param(key, value):
"""
Log a parameter under the current run, creating a run if necessary.
:param key: Parameter name (string)
:param value: Parameter value (string, but will be string-ified if not)
"""
run_id = _get_or_start_run().info.run_uuid
MlflowClient().log_param(run_id, key, value)
def set_tag(key, value):
"""
Set a tag under the current run, creating a run if necessary.
:param key: Tag name (string)
:param value: Tag value (string, but will be string-ified if not)
"""
run_id = _get_or_start_run().info.run_uuid
MlflowClient().set_tag(run_id, key, value)
def log_metric(key, value):
"""
Log a metric under the current run, creating a run if necessary.
:param key: Metric name (string).
:param value: Metric value (float).
"""
if not isinstance(value, numbers.Number):
eprint("WARNING: The metric {}={} was not logged because the value is not a number.".format(
key, value))
return
run_id = _get_or_start_run().info.run_uuid
MlflowClient().log_metric(run_id, key, value, int(time.time()))
def log_artifact(local_path, artifact_path=None):
"""
Log a local file or directory as an artifact of the currently active run.
:param local_path: Path to the file to write.
:param artifact_path: If provided, the directory in ``artifact_uri`` to write to.
"""
run_id = _get_or_start_run().info.run_uuid
MlflowClient().log_artifact(run_id, local_path, artifact_path)
def log_artifacts(local_dir, artifact_path=None):
"""
Log all the contents of a local directory as artifacts of the run.
:param local_dir: Path to the directory of files to write.
:param artifact_path: If provided, the directory in ``artifact_uri`` to write to.
"""
run_id = _get_or_start_run().info.run_uuid
MlflowClient().log_artifacts(run_id, local_dir, artifact_path)
def create_experiment(name, artifact_location=None):
"""
Create an experiment.
:param name: The experiment name. Must be unique.
:param artifact_location: The location to store run artifacts.
If not provided, the server picks an appropriate default.
:return: Integer ID of the created experiment.
"""
return MlflowClient().create_experiment(name, artifact_location)
def get_artifact_uri():
"""
Get the artifact URI of the currently active run. Calls to ``log_artifact`` and
``log_artifacts`` write artifact(s) to subdirectories of the returned URI.
"""
return _get_or_start_run().info.artifact_uri
def _get_or_start_run():
if len(_active_run_stack) > 0:
return _active_run_stack[-1]
return start_run()
def _get_main_file():
if len(sys.argv) > 0:
return sys.argv[0]
return None
def _get_source_name():
main_file = _get_main_file()
if main_file is not None:
return main_file
return "<console>"
def _get_source_version():
main_file = _get_main_file()
if main_file is not None:
return _get_git_commit(main_file)
return None
def _get_source_type():
return SourceType.LOCAL
def _get_experiment_id():
return int(_active_experiment_id or
env.get_env(_EXPERIMENT_ID_ENV_VAR) or
Experiment.DEFAULT_EXPERIMENT_ID)
def _get_git_commit(path):
try:
from git import Repo, InvalidGitRepositoryError, GitCommandNotFound, NoSuchPathError
except ImportError as e:
eprint("Notice: failed to import Git (the Git executable is probably not on your PATH),"
" so Git SHA is not available. Error: %s" % e)
return None
try:
if os.path.isfile(path):
path = os.path.dirname(path)
repo = Repo(path, search_parent_directories=True)
commit = repo.head.commit.hexsha
return commit
except (InvalidGitRepositoryError, GitCommandNotFound, ValueError, NoSuchPathError):
return None
| [
"[email protected]"
] | |
7c4402a03d6196f44369aae48542ac89a72a363f | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/sql/azure-mgmt-sql/azure/mgmt/sql/aio/operations/_managed_database_vulnerability_assessment_scans_operations.py | ce14b2f5af91e3cff8bce7e213cbbf2069ae91ee | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 21,958 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ManagedDatabaseVulnerabilityAssessmentScansOperations:
"""ManagedDatabaseVulnerabilityAssessmentScansOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.sql.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _initiate_scan_initial(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
scan_id: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01-preview"
# Construct URL
url = self._initiate_scan_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'managedInstanceName': self._serialize.url("managed_instance_name", managed_instance_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'vulnerabilityAssessmentName': self._serialize.url("vulnerability_assessment_name", vulnerability_assessment_name, 'str'),
'scanId': self._serialize.url("scan_id", scan_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_initiate_scan_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/scans/{scanId}/initiateScan'} # type: ignore
async def begin_initiate_scan(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
scan_id: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Executes a Vulnerability Assessment database scan.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param managed_instance_name: The name of the managed instance.
:type managed_instance_name: str
:param database_name: The name of the database.
:type database_name: str
:param vulnerability_assessment_name: The name of the vulnerability assessment.
:type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
:param scan_id: The vulnerability assessment scan Id of the scan to retrieve.
:type scan_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._initiate_scan_initial(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
database_name=database_name,
vulnerability_assessment_name=vulnerability_assessment_name,
scan_id=scan_id,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'managedInstanceName': self._serialize.url("managed_instance_name", managed_instance_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'vulnerabilityAssessmentName': self._serialize.url("vulnerability_assessment_name", vulnerability_assessment_name, 'str'),
'scanId': self._serialize.url("scan_id", scan_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_initiate_scan.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/scans/{scanId}/initiateScan'} # type: ignore
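    # Hypothetical call sketch (client construction omitted; names assumed):
    #     poller = await sql_client.managed_database_vulnerability_assessment_scans.begin_initiate_scan(
    #         "my-rg", "my-instance", "my-db", "default", "scan-1")
    #     await poller.result()  # returns once the scan has been initiated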
async def export(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
scan_id: str,
**kwargs: Any
) -> "_models.DatabaseVulnerabilityAssessmentScansExport":
"""Convert an existing scan result to a human readable format. If already exists nothing happens.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param managed_instance_name: The name of the managed instance.
:type managed_instance_name: str
:param database_name: The name of the scanned database.
:type database_name: str
:param vulnerability_assessment_name: The name of the vulnerability assessment.
:type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
:param scan_id: The vulnerability assessment scan Id.
:type scan_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DatabaseVulnerabilityAssessmentScansExport, or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.DatabaseVulnerabilityAssessmentScansExport
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DatabaseVulnerabilityAssessmentScansExport"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = self.export.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'managedInstanceName': self._serialize.url("managed_instance_name", managed_instance_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'vulnerabilityAssessmentName': self._serialize.url("vulnerability_assessment_name", vulnerability_assessment_name, 'str'),
'scanId': self._serialize.url("scan_id", scan_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('DatabaseVulnerabilityAssessmentScansExport', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('DatabaseVulnerabilityAssessmentScansExport', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
export.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/scans/{scanId}/export'} # type: ignore
def list_by_database(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
**kwargs: Any
) -> AsyncIterable["_models.VulnerabilityAssessmentScanRecordListResult"]:
"""Lists the vulnerability assessment scans of a database.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param managed_instance_name: The name of the managed instance.
:type managed_instance_name: str
:param database_name: The name of the database.
:type database_name: str
:param vulnerability_assessment_name: The name of the vulnerability assessment.
:type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VulnerabilityAssessmentScanRecordListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.sql.models.VulnerabilityAssessmentScanRecordListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VulnerabilityAssessmentScanRecordListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_database.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'managedInstanceName': self._serialize.url("managed_instance_name", managed_instance_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'vulnerabilityAssessmentName': self._serialize.url("vulnerability_assessment_name", vulnerability_assessment_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('VulnerabilityAssessmentScanRecordListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_database.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/scans'} # type: ignore
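    # Hypothetical iteration sketch (the async pager yields scan records):
    #     async for record in sql_client.managed_database_vulnerability_assessment_scans.list_by_database(
    #             "my-rg", "my-instance", "my-db", "default"):
    #         print(record.scan_id)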
async def get(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
vulnerability_assessment_name: Union[str, "_models.VulnerabilityAssessmentName"],
scan_id: str,
**kwargs: Any
) -> "_models.VulnerabilityAssessmentScanRecord":
"""Gets a vulnerability assessment scan record of a database.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param managed_instance_name: The name of the managed instance.
:type managed_instance_name: str
:param database_name: The name of the database.
:type database_name: str
:param vulnerability_assessment_name: The name of the vulnerability assessment.
:type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
:param scan_id: The vulnerability assessment scan Id of the scan to retrieve.
:type scan_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VulnerabilityAssessmentScanRecord, or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.VulnerabilityAssessmentScanRecord
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VulnerabilityAssessmentScanRecord"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'managedInstanceName': self._serialize.url("managed_instance_name", managed_instance_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'vulnerabilityAssessmentName': self._serialize.url("vulnerability_assessment_name", vulnerability_assessment_name, 'str'),
'scanId': self._serialize.url("scan_id", scan_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VulnerabilityAssessmentScanRecord', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}/scans/{scanId}'} # type: ignore
| [
"[email protected]"
] | |
6e6e2d47512cdbd5432c4001855a5b23687540ae | 0add7953d3e3ce2df9e8265102be39b758579753 | /built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/search_space/networks/tensorflow/backbones/prune_resnet.py | a5b32faa79ca4e832eea9a0341de8e9c088213d8 | [
"Apache-2.0",
"MIT"
] | permissive | Huawei-Ascend/modelzoo | ae161c0b4e581f8b62c77251e9204d958c4cf6c4 | df51ed9c1d6dbde1deef63f2a037a369f8554406 | refs/heads/master | 2023-04-08T08:17:40.058206 | 2020-12-07T08:04:57 | 2020-12-07T08:04:57 | 319,219,518 | 1 | 1 | Apache-2.0 | 2023-03-24T22:22:00 | 2020-12-07T06:01:32 | Python | UTF-8 | Python | false | false | 3,242 | py | # -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""ResNet models for pruning."""
import tensorflow as tf
from vega.search_space.networks import NetTypes, NetworkFactory
from ..blocks.resnet_block import _prune_basic_block
@NetworkFactory.register(NetTypes.BACKBONE)
class PruneResNet(object):
"""PruneResNet.
    :param descript: network description
:type descript: dict
"""
def __init__(self, descript):
"""Init PruneResNet."""
self.net_desc = descript
self.block = _prune_basic_block
self.encoding = descript.get('encoding')
self.chn = descript.get('chn')
self.chn_node = descript.get('chn_node')
self.chn_mask = descript.get('chn_mask', None)
self.chn_node_mask = descript.get('chn_node_mask', None)
self.num_blocks = descript.get('num_blocks', [3, 3, 3])
self.num_classes = descript.get('num_classes', 10)
self.in_planes = self.chn_node[0]
self.data_format = "channels_first"
self.scope_name = 'PruneResnet'
def _forward_prune_block(self, x, bottleneck, block, planes, inner_planes, num_blocks, stride, training, name):
"""Create resolution block of ResNet."""
idx = 0
strides = [stride] + [1] * (num_blocks - 1)
expansion = 4 if bottleneck else 1
for stride in strides:
x = block(x, planes, inner_planes[idx], training, self.data_format,
name + '/block_' + str(idx), strides=stride)
self.in_planes = planes * expansion
idx += 1
return x
def __call__(self, x, training):
"""Forward function of ResNet."""
if self.data_format == 'channels_first':
x = tf.transpose(x, [0, 3, 1, 2])
x = tf.layers.conv2d(x, self.chn_node[0], 3, padding='same', use_bias=False,
data_format=self.data_format, name='conv_1')
x = tf.layers.batch_normalization(x, axis=1 if self.data_format == 'channels_first' else 3,
name='bn_1', training=training)
x = self._forward_prune_block(x, False, self.block, self.chn_node[1], self.chn[0:3],
self.num_blocks[0], stride=1, training=training, name='layer_1')
x = self._forward_prune_block(x, False, self.block, self.chn_node[2], self.chn[3:6],
self.num_blocks[1], stride=2, training=training, name='layer_2')
x = self._forward_prune_block(x, False, self.block, self.chn_node[3], self.chn[6:9],
self.num_blocks[2], stride=2, training=training, name='layer_3')
x = tf.nn.relu(x)
x = tf.reduce_mean(x, [-2, -1], keepdims=True)
out = tf.layers.dense(tf.reshape(x, [x.get_shape()[0], -1]), self.num_classes)
return out
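# Hypothetical usage sketch (descriptor keys as read in __init__ above; all
# values here are assumptions for illustration):
#     desc = {'chn_node': [16, 16, 32, 64], 'chn': [16]*3 + [32]*3 + [64]*3,
#             'num_blocks': [3, 3, 3], 'num_classes': 10}
#     net = PruneResNet(desc)
#     logits = net(images, training=True)  # images: NHWC float tensor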
| [
"[email protected]"
] | |
2c4aed5ed7cb5ee974c114d183f12163eb2fc911 | d331f11cf1e779e5ccf72c20f700388d07065b19 | /BB_HRRR/GLM_and_HRRR/save_GLM_HRRR_binary_fields.py | 5e56de136638fc61b18ed129afce5c529ea38bc9 | [] | no_license | geofbaum/pyBKB_v3 | cd54d886348547a67a712e4aa1f8299e43667e39 | 8cf483e5296c8b38b57e31a0f65ea29762b57f38 | refs/heads/master | 2020-07-02T18:39:46.296555 | 2019-08-08T18:36:31 | 2019-08-08T18:36:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,389 | py | ## Brian Blaylock
## May 7, 2019
"""
Generate HRRR-GLM binary Lightning tables and store data in a dictionary.
Save the dictionary for later use. Store on Horel-Group8 (approx. 150 GB).
"""
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
import multiprocessing
import os
import sys
sys.path.append('/uufs/chpc.utah.edu/common/home/u0553130/pyBKB_v3/')
from BB_HRRR.HRRR_Pando import get_hrrr_variable, get_hrrr_latlon
from BB_GOES.get_GOES import get_GOES_nearesttime
from BB_GOES.get_GLM import get_GLM_file_nearesttime, accumulate_GLM_FAST, filter_by_HRRR
from BB_datetimes.range import range_dates
from BB_HRRR.GLM_and_HRRR.GLM_events_HRRR import get_GLM_HRRR_contingency_stats, domains
def get_and_save(DATE):
print(DATE)
BASE = '/uufs/chpc.utah.edu/common/home/horel-group8/blaylock/GLM-HRRR_LTNG_binary/'
FILE = BASE+'/HRRR-GLM-Binary_%s' % DATE.strftime('%Y-%m-%d_%H%M')
if not os.path.exists(FILE):
# This function will write the file if it isn't available
a = get_GLM_HRRR_contingency_stats(DATE)
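# Example (illustrative): DATE = datetime(2018, 8, 1, 0) produces the file
# .../GLM-HRRR_LTNG_binary/HRRR-GLM-Binary_2018-08-01_0000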
#sDATE = datetime(2018, 5, 1)
#eDATE = datetime(2018, 8, 1)
sDATE = datetime(2018, 8, 1)
eDATE = datetime(2018, 10, 1)
DATES = range_dates(sDATE, eDATE, HOURS=1)
list(map(get_and_save, DATES))
# NOTE: Can't use multiprocessing because the get_GLM_HRRR_contingency_stats
# uses it instead. | [
"[email protected]"
] | |
6c64949db5c6e14dc709d406fa53c415dabe40cb | fa7d04cb1eb932d609cc2e4567920b77b2080b57 | /基础学习/python_work/Chapter 6/people_message_homework.py | a9596bcd4f98211383f405e04153752e22902adf | [] | no_license | Yangqqiamg/Python-text | 3b74088781e46a21534dcded49e8deae9c458e5d | 426e15d4993c1658909a4e1a848829d09b029c96 | refs/heads/master | 2020-04-11T15:07:06.825475 | 2019-01-01T14:32:19 | 2019-01-01T14:32:19 | 161,880,306 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | #one
people_message = {
'first_name': 'add',
'last_name': 'mike',
'age': 24,
'city': 'shanghai',
}
print(people_message)
#two
people_num = {
'mike': 6,
'joe': 8,
'mary': 9,
'lihua': 15,
}
print('joe likes ' + str(people_num['joe']))
| [
"[email protected]"
] | |
183e6bcb3efbec82b96f70e5665e476ceaf85779 | 3ac9deb93c7ef377749e1a92589757ff70e95011 | /modules/python_file.py | 788862f475b882abaf33013495a9abd80862cc83 | [] | no_license | sambapython/raisingstarts | 6afc9ecfe8b6c21d1cbed87802465a0de581ddc8 | c186dd0d58ecfd845c5092ddf4801e42ef698cae | refs/heads/master | 2021-01-20T16:09:36.802216 | 2016-09-20T08:36:55 | 2016-09-20T08:36:55 | 62,765,486 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | import file1
import file2
import file3
import mod1
import sqlite3
print(mod1.f1.fun1())
print (file1.fun1())
print (file3.fun3())
try:
con=sqlite3.connect("db2.db")
#con.execute("create table persons(id int,name varchar(60))")
#con.commit()
except Exception as err:
    print(err)
finally:
pass
#con.close()
#print (file3.fun4())
#import file2 | [
"[email protected]"
] | |
b3eaae4f74a3d95d15111b18db8173f26b41fbb7 | ff1e3e87e0432173e67782e4c529701426918624 | /PRODUSTRY/asgi.py | d4c0c582eb72d65389b47fbaef83d4187dcc768b | [] | no_license | Rayhun/Produstry | dd7c13e39114f893d4ba3ea012d903ea1723d9ff | 325e74c573c333f00f0842c14d92052818a85d5c | refs/heads/master | 2023-04-22T20:52:49.058342 | 2021-05-11T19:47:13 | 2021-05-11T19:47:13 | 294,981,881 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
ASGI config for PRODUSTRY project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'PRODUSTRY.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
a90afeed2963880cc5342b8ffd1de1542126abeb | 2b7434bef25ce9c1d603faa4c63eebc6ea629e7c | /backend/lizz_8_19_events_19655/settings.py | 619ecde25e9e8d41c048934650eaae9d2e43da81 | [] | no_license | crowdbotics-apps/lizz-8-19-events-19655 | 56052b34125ecc66283a1e53930f20bc4b50d500 | 1da47d86b2323cabfca599e76f796972c79dae67 | refs/heads/master | 2022-12-05T21:21:25.258180 | 2020-08-19T18:28:27 | 2020-08-19T18:28:27 | 288,789,461 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,867 | py | """
Django settings for lizz_8_19_events_19655 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
"event",
]
LOCAL_APPS = [
"home",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
# start fcm_django push notifications
"fcm_django",
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "lizz_8_19_events_19655.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "lizz_8_19_events_19655.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",},
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",},
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG:
# output email to console instead of sending
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
a42149122e6f49793dcbcd256512cec4d76d656a | 04975a41eb459f1528dcbdcb1143a3cb535aa620 | /Dynamic_easy/inter_08_01.py | 24efcaf21c829d2f7f98ade60f94fb5317623546 | [] | no_license | RickLee910/Leetcode_easy | 2a50d632379826979a985e1b9950d4cf6bbd8b18 | c2687daf334f96a908737067bb915b8b072d0d56 | refs/heads/master | 2023-01-29T11:09:26.701243 | 2020-12-02T04:36:14 | 2020-12-02T04:36:14 | 294,952,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | class Solution:
def waysToStep(self, n: int) -> int:
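        # Tribonacci-style DP: ways(n) = ways(n-1) + ways(n-2) + ways(n-3),
        # taken modulo 1000000007; (a, b, c) roll the previous three counts.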
if n == 1 or n == 2:
return n
if n == 3:
return 4
a, b, c = 1, 2, 4
        for i in range(3, n):  # n - 3 updates take c from ways(3) up to ways(n)
a, b, c = b, c, (a + b + c) % (1000000007)
return c
s = Solution()
a = 5
print(s.waysToStep(a)) | [
"[email protected]"
] | |
ff5069979385848256736247d30c20bb6994c6b2 | 239464e12610791457d393e2573f79babd776456 | /njupt/models/aolan.py | 4565792529ff5435486f548d760f855b108507df | [] | no_license | sanmumuzi/NJUPT-API | a6f48a963521b36940d59525cc730811561fbae6 | 621960fb0749e34da8c22e809b5264639071a853 | refs/heads/master | 2021-09-01T05:02:37.669007 | 2017-12-25T00:20:59 | 2017-12-25T00:20:59 | 115,200,611 | 0 | 0 | null | 2017-12-23T14:27:14 | 2017-12-23T14:27:14 | null | UTF-8 | Python | false | false | 2,605 | py | import hashlib
from njupt import settings
from njupt.models.base import Model
from njupt.urls import URL
class Aolan(Model):
def login(self, account, password):
"""
        Log in to the Aolan system at jwxt.njupt.edu.cn
        :param account: NJUPT student number, examinee number, or ID card number
        :param password: password
        :return: {'r': 1, "msg": "登录失败"} (failure) or {'r': 0, 'msg': '登录成功'} (success)
"""
data = {
"__VIEWSTATE": self._get_viewstate(URL.aolan_login()),
'__VIEWSTATEGENERATOR': self._get_viewstategenerator(URL.aolan_login()),
'userbh': account,
'pas2s': hashlib.md5(password.upper().encode('utf8')).hexdigest(),
"vcode": self._get_captcha(URL.aolan_captcha()),
"cw": "",
"xzbz": "1",
}
return self._login_execute(url=URL.aolan_login(), data=data)
def _login_execute(self, url=None, data=None):
r = self.post(url=url, data=data)
if r.ok:
if "辅导员评议" in r.text:
                self.cookies.save(ignore_discard=True)  # persist the login-session cookies
self.cookies.load(filename=settings.COOKIES_FILE, ignore_discard=True)
return {'r': 0, 'msg': '登录成功'}
else:
return {'r': 1, 'msg': '检查账号密码验证码是否正确'}
else:
return {'r': 1, "msg": "登录失败"}
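# Minimal usage sketch (the account and password below are placeholders):
#   client = Aolan()
#   result = client.login('B16040506', 'password')
#   if result['r'] == 0:
#       pass  # logged in; session cookies were saved to COOKIES_FILE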
class LibAccount(Model):
def login(self, account, password):
"""
        Log in to the NJUPT library at jwxt.njupt.edu.cn
        :param account: NJUPT student number
        :param password: password
        :return: {'r': 1, "msg": "登录失败"} (failure) or {'r': 0, 'msg': '登录成功'} (success)
"""
data = {
"number": account,
'passwd': password,
'captcha': self._get_captcha(URL.lib_captcha()),
'select': "cert_no",
"returnUrl": "",
}
return self._login_execute(url=URL.jwxt_login(), data=data)
def _login_execute(self, url=None, data=None):
        r = self.post(url=url, data=data)
if r.ok:
print(r.text)
if "请到信息维护中完善个人联系方式" in r.text:
                self.cookies.save(ignore_discard=True)  # persist the login-session cookies
self.cookies.load(filename=settings.COOKIES_FILE, ignore_discard=True)
return {'r': 0, 'msg': '登录成功'}
else:
return {'r': 1, 'msg': '检查账号密码验证码是否正确'}
else:
return {'r': 1, "msg": "登录失败"}
| [
"[email protected]"
] | |
38ba2d08e22e3d858578639be012ec3575f7804f | d2cb930ed5df0b1b5f7944e00f6f884bf014803d | /yeko_demo/yeko_demo/settings.py | 2b5ddd20cb874abdfd2ed78b7de8f06e217beff2 | [] | no_license | sixDegree/python-scrapy-demo | 3cae4298b01edab65449cfe9af56b2fa59f4c07d | b66530e54156be8c7877f1fc4d497fd497b6fdda | refs/heads/master | 2020-06-17T03:16:23.038061 | 2019-07-08T09:25:15 | 2019-07-08T09:25:15 | 195,777,787 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,478 | py | # -*- coding: utf-8 -*-
# Scrapy settings for yeko_demo project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://doc.scrapy.org/en/latest/topics/settings.html
# https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'yeko_demo'
SPIDER_MODULES = ['yeko_demo.spiders']
NEWSPIDER_MODULE = 'yeko_demo.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'yeko_demo (+http://www.yourdomain.com)'
USER_AGENT='Mozilla/5.0 (Windows NT 6.1; WOW64; rv:60.0) Gecko/20100101 Firefox/60.0'
MONGO_CONN_STR="mongodb://cj:123456@localhost:27017/?authSource=admin"
LOG_LEVEL = 'INFO'
# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://doc.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://doc.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'yeko_demo.middlewares.YekoDemoSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'yeko_demo.middlewares.YekoDemoDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://doc.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://doc.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
#'yeko_demo.pipelines.YekoDemoPipeline': 300,
'yeko_demo.pipelines.MeterialsPipeline':300,
'yeko_demo.pipelines.MongoPipeline':310,
#'yeko_demo.pipelines.CommentPipeline':320
}
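# Pipeline values are priorities: lower numbers run first, so files are
# fetched by MeterialsPipeline (300) before MongoPipeline (310) stores items.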
ITEM_STORE='./'
FILES_STORE='./meterials'
FILES_EXPIRES = 90
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://doc.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"[email protected]"
] | |
beaf7eedb60db50e4bbda23836ec2a0cd5e63f0a | ea4e24693bddf0d986e7a4dd84a9208b3c28536f | /paayesh/wsgi.py | 6a178fc93c375ca0905a9ebd480f764faf773bb1 | [] | no_license | Amirsorouri00/Django-Modular-Components | ec28812ab603c021a83080c701799f5e0d6a127b | d982c77ef291411718a19bf80d90fa61be65891a | refs/heads/master | 2020-04-14T13:12:05.182391 | 2019-01-06T14:27:20 | 2019-01-06T14:27:20 | 163,862,171 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | """
WSGI config for paayesh project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paayesh.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
fa966d0266e2001c844d1fe290ad644e65465252 | f2641cd50b3c875ade04ea7537b7e1df7cadb688 | /pycon_schemas/version.py | 9ad58278b78ad8f19fb03944cb3683d9d0140a1b | [
"MIT"
] | permissive | PythonSanSebastian/pycon-schemas | 35c4dabe6a89d1d77aa5bc709740544b87c8c130 | 645ec032d4976fd45b3b050bd71820816212cf68 | refs/heads/master | 2022-07-07T01:44:13.295431 | 2019-10-25T20:05:12 | 2019-10-25T20:05:12 | 216,343,062 | 1 | 0 | MIT | 2020-07-01T16:57:51 | 2019-10-20T10:20:44 | Python | UTF-8 | Python | false | false | 60 | py | """Release version number."""
__version__ = '0.0.1' # noqa
| [
"[email protected]"
] | |
881c5c08827486887ff44acd266e89c45dddfd03 | 8898273f9811fab29eb5621734bafcdf204d8229 | /scipy-stubs/integrate/_ivp/common.pyi | 81dabab39e5cfc7ed3b65529e100e1954d3d90d1 | [] | no_license | tyrion/scipy-stubs | 628ad6321a7e1502683a2b55a759777508ab4b67 | bf49a91313523c4f635bc3e5d14444c1361caf64 | refs/heads/master | 2020-05-30T21:59:43.001510 | 2019-06-03T10:30:54 | 2019-06-03T10:30:54 | 189,984,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,267 | pyi | # Stubs for scipy.integrate._ivp.common (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any, Optional
EPS: Any
def validate_max_step(max_step: Any): ...
def warn_extraneous(extraneous: Any) -> None: ...
def validate_tol(rtol: Any, atol: Any, n: Any): ...
def norm(x: Any): ...
def select_initial_step(fun: Any, t0: Any, y0: Any, f0: Any, direction: Any, order: Any, rtol: Any, atol: Any): ...
class OdeSolution:
n_segments: Any = ...
ts: Any = ...
interpolants: Any = ...
t_min: Any = ...
t_max: Any = ...
ascending: bool = ...
ts_sorted: Any = ...
def __init__(self, ts: Any, interpolants: Any) -> None: ...
def _call_single(self, t: Any): ...
def __call__(self, t: Any): ...
NUM_JAC_DIFF_REJECT: Any
NUM_JAC_DIFF_SMALL: Any
NUM_JAC_DIFF_BIG: Any
NUM_JAC_MIN_FACTOR: Any
NUM_JAC_FACTOR_INCREASE: int
NUM_JAC_FACTOR_DECREASE: float
def num_jac(fun: Any, t: Any, y: Any, f: Any, threshold: Any, factor: Any, sparsity: Optional[Any] = ...): ...
def _dense_num_jac(fun: Any, t: Any, y: Any, f: Any, h: Any, factor: Any, y_scale: Any): ...
def _sparse_num_jac(fun: Any, t: Any, y: Any, f: Any, h: Any, factor: Any, y_scale: Any, structure: Any, groups: Any): ...
| [
"[email protected]"
] | |
9f9d2fbbb6901dac95d02409b10aefadfdaf84b4 | a0a9fca2b93a53625e2043e0c64da29388ff8613 | /tests/unit/virt/libvirt/fakelibvirt.py | 75cd4ef9abb7c3212466edc4bc50a6f62e76f076 | [] | no_license | bopopescu/nova-fet-demo | 8460602a54e897179303d50f7141dc5518b1b2da | b435913a306e97288294933afaffb3d457f548a2 | refs/heads/master | 2022-11-20T08:31:09.019870 | 2017-08-29T06:45:23 | 2017-08-29T06:45:23 | 282,086,612 | 0 | 0 | null | 2020-07-24T00:40:07 | 2020-07-24T00:40:05 | null | UTF-8 | Python | false | false | 41,364 | py | # Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import uuid
import fixtures
from lxml import etree
import six
from nova.compute import arch
from nova.virt.libvirt import config as vconfig
# Allow passing None to the various connect methods
# (i.e. allow the client to rely on default URLs)
allow_default_uri_connection = True
# Has libvirt connection been used at least once
connection_used = False
def _reset():
global allow_default_uri_connection
allow_default_uri_connection = True
# virDomainState
VIR_DOMAIN_NOSTATE = 0
VIR_DOMAIN_RUNNING = 1
VIR_DOMAIN_BLOCKED = 2
VIR_DOMAIN_PAUSED = 3
VIR_DOMAIN_SHUTDOWN = 4
VIR_DOMAIN_SHUTOFF = 5
VIR_DOMAIN_CRASHED = 6
# NOTE(mriedem): These values come from include/libvirt/libvirt-domain.h
VIR_DOMAIN_XML_SECURE = 1
VIR_DOMAIN_XML_INACTIVE = 2
VIR_DOMAIN_XML_UPDATE_CPU = 4
VIR_DOMAIN_XML_MIGRATABLE = 8
VIR_DOMAIN_BLOCK_REBASE_SHALLOW = 1
VIR_DOMAIN_BLOCK_REBASE_REUSE_EXT = 2
VIR_DOMAIN_BLOCK_REBASE_COPY = 8
VIR_DOMAIN_BLOCK_JOB_ABORT_ASYNC = 1
VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT = 2
VIR_DOMAIN_EVENT_ID_LIFECYCLE = 0
VIR_DOMAIN_EVENT_DEFINED = 0
VIR_DOMAIN_EVENT_UNDEFINED = 1
VIR_DOMAIN_EVENT_STARTED = 2
VIR_DOMAIN_EVENT_SUSPENDED = 3
VIR_DOMAIN_EVENT_RESUMED = 4
VIR_DOMAIN_EVENT_STOPPED = 5
VIR_DOMAIN_EVENT_SHUTDOWN = 6
VIR_DOMAIN_EVENT_PMSUSPENDED = 7
VIR_DOMAIN_UNDEFINE_MANAGED_SAVE = 1
VIR_DOMAIN_UNDEFINE_NVRAM = 4
VIR_DOMAIN_AFFECT_CURRENT = 0
VIR_DOMAIN_AFFECT_LIVE = 1
VIR_DOMAIN_AFFECT_CONFIG = 2
VIR_CPU_COMPARE_ERROR = -1
VIR_CPU_COMPARE_INCOMPATIBLE = 0
VIR_CPU_COMPARE_IDENTICAL = 1
VIR_CPU_COMPARE_SUPERSET = 2
VIR_CRED_USERNAME = 1
VIR_CRED_AUTHNAME = 2
VIR_CRED_LANGUAGE = 3
VIR_CRED_CNONCE = 4
VIR_CRED_PASSPHRASE = 5
VIR_CRED_ECHOPROMPT = 6
VIR_CRED_NOECHOPROMPT = 7
VIR_CRED_REALM = 8
VIR_CRED_EXTERNAL = 9
VIR_MIGRATE_LIVE = 1
VIR_MIGRATE_PEER2PEER = 2
VIR_MIGRATE_TUNNELLED = 4
VIR_MIGRATE_PERSIST_DEST = 8
VIR_MIGRATE_UNDEFINE_SOURCE = 16
VIR_MIGRATE_NON_SHARED_INC = 128
VIR_MIGRATE_AUTO_CONVERGE = 8192
VIR_MIGRATE_POSTCOPY = 32768
VIR_NODE_CPU_STATS_ALL_CPUS = -1
VIR_DOMAIN_START_PAUSED = 1
# libvirtError enums
# (Intentionally different from what's in libvirt. We do this to check,
# that consumers of the library are using the symbolic names rather than
# hardcoding the numerical values)
VIR_FROM_QEMU = 100
VIR_FROM_DOMAIN = 200
VIR_FROM_NWFILTER = 330
VIR_FROM_REMOTE = 340
VIR_FROM_RPC = 345
VIR_FROM_NODEDEV = 666
VIR_ERR_INVALID_ARG = 8
VIR_ERR_NO_SUPPORT = 3
VIR_ERR_XML_DETAIL = 350
VIR_ERR_NO_DOMAIN = 420
VIR_ERR_OPERATION_FAILED = 510
VIR_ERR_OPERATION_INVALID = 55
VIR_ERR_OPERATION_TIMEOUT = 68
VIR_ERR_NO_NWFILTER = 620
VIR_ERR_SYSTEM_ERROR = 900
VIR_ERR_INTERNAL_ERROR = 950
VIR_ERR_CONFIG_UNSUPPORTED = 951
VIR_ERR_NO_NODE_DEVICE = 667
VIR_ERR_NO_SECRET = 66
VIR_ERR_AGENT_UNRESPONSIVE = 86
VIR_ERR_ARGUMENT_UNSUPPORTED = 74
# Readonly
VIR_CONNECT_RO = 1
# virConnectBaselineCPU flags
VIR_CONNECT_BASELINE_CPU_EXPAND_FEATURES = 1
# snapshotCreateXML flags
VIR_DOMAIN_SNAPSHOT_CREATE_NO_METADATA = 4
VIR_DOMAIN_SNAPSHOT_CREATE_DISK_ONLY = 16
VIR_DOMAIN_SNAPSHOT_CREATE_REUSE_EXT = 32
VIR_DOMAIN_SNAPSHOT_CREATE_QUIESCE = 64
# blockCommit flags
VIR_DOMAIN_BLOCK_COMMIT_RELATIVE = 4
# blockRebase flags
VIR_DOMAIN_BLOCK_REBASE_RELATIVE = 8
VIR_CONNECT_LIST_DOMAINS_ACTIVE = 1
VIR_CONNECT_LIST_DOMAINS_INACTIVE = 2
# secret type
VIR_SECRET_USAGE_TYPE_NONE = 0
VIR_SECRET_USAGE_TYPE_VOLUME = 1
VIR_SECRET_USAGE_TYPE_CEPH = 2
VIR_SECRET_USAGE_TYPE_ISCSI = 3
# Libvirt version to match MIN_LIBVIRT_VERSION in driver.py
FAKE_LIBVIRT_VERSION = 1002001
# Libvirt version to match MIN_QEMU_VERSION in driver.py
FAKE_QEMU_VERSION = 1005003
class HostInfo(object):
def __init__(self, arch=arch.X86_64, kB_mem=4096,
cpus=2, cpu_mhz=800, cpu_nodes=1,
cpu_sockets=1, cpu_cores=2,
cpu_threads=1, cpu_model="Penryn",
cpu_vendor="Intel", numa_topology='',
cpu_disabled=None):
"""Create a new Host Info object
:param arch: (string) indicating the CPU arch
(eg 'i686' or whatever else uname -m might return)
:param kB_mem: (int) memory size in KBytes
:param cpus: (int) the number of active CPUs
:param cpu_mhz: (int) expected CPU frequency
:param cpu_nodes: (int) the number of NUMA cell, 1 for unusual
NUMA topologies or uniform
:param cpu_sockets: (int) number of CPU sockets per node if nodes > 1,
total number of CPU sockets otherwise
:param cpu_cores: (int) number of cores per socket
:param cpu_threads: (int) number of threads per core
:param cpu_model: CPU model
:param cpu_vendor: CPU vendor
:param numa_topology: Numa topology
:param cpu_disabled: List of disabled cpus
"""
self.arch = arch
self.kB_mem = kB_mem
self.cpus = cpus
self.cpu_mhz = cpu_mhz
self.cpu_nodes = cpu_nodes
self.cpu_cores = cpu_cores
self.cpu_threads = cpu_threads
self.cpu_sockets = cpu_sockets
self.cpu_model = cpu_model
self.cpu_vendor = cpu_vendor
self.numa_topology = numa_topology
self.disabled_cpus_list = cpu_disabled or []
@classmethod
def _gen_numa_topology(self, cpu_nodes, cpu_sockets, cpu_cores,
cpu_threads, kb_mem, numa_mempages_list=None):
topology = vconfig.LibvirtConfigCapsNUMATopology()
cpu_count = 0
for cell_count in range(cpu_nodes):
cell = vconfig.LibvirtConfigCapsNUMACell()
cell.id = cell_count
cell.memory = kb_mem / cpu_nodes
for socket_count in range(cpu_sockets):
for cpu_num in range(cpu_cores * cpu_threads):
cpu = vconfig.LibvirtConfigCapsNUMACPU()
cpu.id = cpu_count
cpu.socket_id = cell_count
cpu.core_id = cpu_num // cpu_threads
cpu.siblings = set([cpu_threads *
(cpu_count // cpu_threads) + thread
for thread in range(cpu_threads)])
cell.cpus.append(cpu)
cpu_count += 1
# Set mempages per numa cell. if numa_mempages_list is empty
# we will set only the default 4K pages.
if numa_mempages_list:
mempages = numa_mempages_list[cell_count]
else:
mempages = vconfig.LibvirtConfigCapsNUMAPages()
mempages.size = 4
mempages.total = cell.memory / mempages.size
mempages = [mempages]
cell.mempages = mempages
topology.cells.append(cell)
return topology
def get_numa_topology(self):
return self.numa_topology
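# Minimal HostInfo usage sketch (values are illustrative):
#   info = HostInfo(cpu_nodes=2, cpu_sockets=1, cpu_cores=4,
#                   kB_mem=8 * 1024 * 1024)
#   conn = Connection('qemu:///system', host_info=info)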
VIR_DOMAIN_JOB_NONE = 0
VIR_DOMAIN_JOB_BOUNDED = 1
VIR_DOMAIN_JOB_UNBOUNDED = 2
VIR_DOMAIN_JOB_COMPLETED = 3
VIR_DOMAIN_JOB_FAILED = 4
VIR_DOMAIN_JOB_CANCELLED = 5
def _parse_disk_info(element):
disk_info = {}
disk_info['type'] = element.get('type', 'file')
disk_info['device'] = element.get('device', 'disk')
driver = element.find('./driver')
if driver is not None:
disk_info['driver_name'] = driver.get('name')
disk_info['driver_type'] = driver.get('type')
source = element.find('./source')
if source is not None:
disk_info['source'] = source.get('file')
if not disk_info['source']:
disk_info['source'] = source.get('dev')
if not disk_info['source']:
disk_info['source'] = source.get('path')
target = element.find('./target')
if target is not None:
disk_info['target_dev'] = target.get('dev')
disk_info['target_bus'] = target.get('bus')
return disk_info
def disable_event_thread(self):
"""Disable nova libvirt driver event thread.
The Nova libvirt driver includes a native thread which monitors
the libvirt event channel. In a testing environment this becomes
problematic because it means we've got a floating thread calling
sleep(1) over the life of the unit test. Seems harmless? It's not,
because we sometimes want to test things like retry loops that
    should have specific sleep patterns. An unlucky firing of the
libvirt thread will cause a test failure.
"""
    # Because we are patching a method in a class, MonkeyPatch doesn't
    # auto-import correctly. Import explicitly, otherwise the patching
    # may silently fail.
import nova.virt.libvirt.host # noqa
def evloop(*args, **kwargs):
pass
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.libvirt.host.Host._init_events',
evloop))
class libvirtError(Exception):
"""This class was copied and slightly modified from
`libvirt-python:libvirt-override.py`.
Since a test environment will use the real `libvirt-python` version of
`libvirtError` if it's installed and not this fake, we need to maintain
strict compatibility with the original class, including `__init__` args
and instance-attributes.
To create a libvirtError instance you should:
# Create an unsupported error exception
exc = libvirtError('my message')
exc.err = (libvirt.VIR_ERR_NO_SUPPORT,)
self.err is a tuple of form:
(error_code, error_domain, error_message, error_level, str1, str2,
str3, int1, int2)
Alternatively, you can use the `make_libvirtError` convenience function to
allow you to specify these attributes in one shot.
"""
def __init__(self, defmsg, conn=None, dom=None, net=None, pool=None,
vol=None):
Exception.__init__(self, defmsg)
self.err = None
def get_error_code(self):
if self.err is None:
return None
return self.err[0]
def get_error_domain(self):
if self.err is None:
return None
return self.err[1]
def get_error_message(self):
if self.err is None:
return None
return self.err[2]
def get_error_level(self):
if self.err is None:
return None
return self.err[3]
def get_str1(self):
if self.err is None:
return None
return self.err[4]
def get_str2(self):
if self.err is None:
return None
return self.err[5]
def get_str3(self):
if self.err is None:
return None
return self.err[6]
def get_int1(self):
if self.err is None:
return None
return self.err[7]
def get_int2(self):
if self.err is None:
return None
return self.err[8]
class NWFilter(object):
def __init__(self, connection, xml):
self._connection = connection
self._xml = xml
self._parse_xml(xml)
def _parse_xml(self, xml):
tree = etree.fromstring(xml)
root = tree.find('.')
self._name = root.get('name')
def undefine(self):
self._connection._remove_filter(self)
class NodeDevice(object):
def __init__(self, connection, xml=None):
self._connection = connection
self._xml = xml
if xml is not None:
self._parse_xml(xml)
def _parse_xml(self, xml):
tree = etree.fromstring(xml)
root = tree.find('.')
self._name = root.get('name')
def attach(self):
pass
def dettach(self):
pass
def reset(self):
pass
class Domain(object):
def __init__(self, connection, xml, running=False, transient=False):
self._connection = connection
if running:
connection._mark_running(self)
self._state = running and VIR_DOMAIN_RUNNING or VIR_DOMAIN_SHUTOFF
self._transient = transient
self._def = self._parse_definition(xml)
self._has_saved_state = False
self._snapshots = {}
self._id = self._connection._id_counter
def _parse_definition(self, xml):
try:
tree = etree.fromstring(xml)
except etree.ParseError:
raise make_libvirtError(
libvirtError, "Invalid XML.",
error_code=VIR_ERR_XML_DETAIL,
error_domain=VIR_FROM_DOMAIN)
definition = {}
name = tree.find('./name')
if name is not None:
definition['name'] = name.text
uuid_elem = tree.find('./uuid')
if uuid_elem is not None:
definition['uuid'] = uuid_elem.text
else:
definition['uuid'] = str(uuid.uuid4())
vcpu = tree.find('./vcpu')
if vcpu is not None:
definition['vcpu'] = int(vcpu.text)
memory = tree.find('./memory')
if memory is not None:
definition['memory'] = int(memory.text)
os = {}
os_type = tree.find('./os/type')
if os_type is not None:
os['type'] = os_type.text
os['arch'] = os_type.get('arch', self._connection.host_info.arch)
os_kernel = tree.find('./os/kernel')
if os_kernel is not None:
os['kernel'] = os_kernel.text
os_initrd = tree.find('./os/initrd')
if os_initrd is not None:
os['initrd'] = os_initrd.text
os_cmdline = tree.find('./os/cmdline')
if os_cmdline is not None:
os['cmdline'] = os_cmdline.text
os_boot = tree.find('./os/boot')
if os_boot is not None:
os['boot_dev'] = os_boot.get('dev')
definition['os'] = os
features = {}
acpi = tree.find('./features/acpi')
if acpi is not None:
features['acpi'] = True
definition['features'] = features
devices = {}
device_nodes = tree.find('./devices')
if device_nodes is not None:
disks_info = []
disks = device_nodes.findall('./disk')
for disk in disks:
disks_info += [_parse_disk_info(disk)]
devices['disks'] = disks_info
nics_info = []
nics = device_nodes.findall('./interface')
for nic in nics:
nic_info = {}
nic_info['type'] = nic.get('type')
mac = nic.find('./mac')
if mac is not None:
nic_info['mac'] = mac.get('address')
source = nic.find('./source')
if source is not None:
if nic_info['type'] == 'network':
nic_info['source'] = source.get('network')
elif nic_info['type'] == 'bridge':
nic_info['source'] = source.get('bridge')
nics_info += [nic_info]
devices['nics'] = nics_info
definition['devices'] = devices
return definition
def create(self):
self.createWithFlags(0)
def createWithFlags(self, flags):
# FIXME: Not handling flags at the moment
self._state = VIR_DOMAIN_RUNNING
self._connection._mark_running(self)
self._has_saved_state = False
def isActive(self):
return int(self._state == VIR_DOMAIN_RUNNING)
def undefine(self):
self._connection._undefine(self)
def isPersistent(self):
return True
def undefineFlags(self, flags):
self.undefine()
if flags & VIR_DOMAIN_UNDEFINE_MANAGED_SAVE:
if self.hasManagedSaveImage(0):
self.managedSaveRemove()
def destroy(self):
self._state = VIR_DOMAIN_SHUTOFF
self._connection._mark_not_running(self)
def ID(self):
return self._id
def name(self):
return self._def['name']
def UUIDString(self):
return self._def['uuid']
def interfaceStats(self, device):
return [10000242400, 1234, 0, 2, 213412343233, 34214234, 23, 3]
def blockStats(self, device):
return [2, 10000242400, 234, 2343424234, 34]
def setTime(self, time=None, flags=0):
pass
def suspend(self):
self._state = VIR_DOMAIN_PAUSED
def shutdown(self):
self._state = VIR_DOMAIN_SHUTDOWN
self._connection._mark_not_running(self)
def reset(self, flags):
# FIXME: Not handling flags at the moment
self._state = VIR_DOMAIN_RUNNING
self._connection._mark_running(self)
def info(self):
return [self._state,
int(self._def['memory']),
int(self._def['memory']),
self._def['vcpu'],
123456789]
def migrateToURI(self, desturi, flags, dname, bandwidth):
raise make_libvirtError(
libvirtError,
"Migration always fails for fake libvirt!",
error_code=VIR_ERR_INTERNAL_ERROR,
error_domain=VIR_FROM_QEMU)
def migrateToURI2(self, dconnuri, miguri, dxml, flags, dname, bandwidth):
raise make_libvirtError(
libvirtError,
"Migration always fails for fake libvirt!",
error_code=VIR_ERR_INTERNAL_ERROR,
error_domain=VIR_FROM_QEMU)
def migrateToURI3(self, dconnuri, params, logical_sum):
raise make_libvirtError(
libvirtError,
"Migration always fails for fake libvirt!",
error_code=VIR_ERR_INTERNAL_ERROR,
error_domain=VIR_FROM_QEMU)
def migrateSetMaxDowntime(self, downtime):
pass
def attachDevice(self, xml):
disk_info = _parse_disk_info(etree.fromstring(xml))
disk_info['_attached'] = True
self._def['devices']['disks'] += [disk_info]
return True
def attachDeviceFlags(self, xml, flags):
if (flags & VIR_DOMAIN_AFFECT_LIVE and
self._state != VIR_DOMAIN_RUNNING):
raise make_libvirtError(
libvirtError,
"AFFECT_LIVE only allowed for running domains!",
error_code=VIR_ERR_INTERNAL_ERROR,
error_domain=VIR_FROM_QEMU)
self.attachDevice(xml)
def detachDevice(self, xml):
disk_info = _parse_disk_info(etree.fromstring(xml))
disk_info['_attached'] = True
return disk_info in self._def['devices']['disks']
def detachDeviceFlags(self, xml, flags):
self.detachDevice(xml)
def setUserPassword(self, user, password, flags=0):
pass
def XMLDesc(self, flags):
disks = ''
for disk in self._def['devices']['disks']:
disks += '''<disk type='%(type)s' device='%(device)s'>
<driver name='%(driver_name)s' type='%(driver_type)s'/>
<source file='%(source)s'/>
<target dev='%(target_dev)s' bus='%(target_bus)s'/>
<address type='drive' controller='0' bus='0' unit='0'/>
</disk>''' % disk
nics = ''
for nic in self._def['devices']['nics']:
nics += '''<interface type='%(type)s'>
<mac address='%(mac)s'/>
<source %(type)s='%(source)s'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x03'
function='0x0'/>
</interface>''' % nic
return '''<domain type='kvm'>
<name>%(name)s</name>
<uuid>%(uuid)s</uuid>
<memory>%(memory)s</memory>
<currentMemory>%(memory)s</currentMemory>
<vcpu>%(vcpu)s</vcpu>
<os>
<type arch='%(arch)s' machine='pc-0.12'>hvm</type>
<boot dev='hd'/>
</os>
<features>
<acpi/>
<apic/>
<pae/>
</features>
<clock offset='localtime'/>
<on_poweroff>destroy</on_poweroff>
<on_reboot>restart</on_reboot>
<on_crash>restart</on_crash>
<devices>
<emulator>/usr/bin/kvm</emulator>
%(disks)s
<controller type='ide' index='0'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x01'
function='0x1'/>
</controller>
%(nics)s
<serial type='file'>
<source path='dummy.log'/>
<target port='0'/>
</serial>
<serial type='pty'>
<source pty='/dev/pts/27'/>
<target port='1'/>
</serial>
<serial type='tcp'>
<source host="-1" service="-1" mode="bind"/>
</serial>
<console type='file'>
<source path='dummy.log'/>
<target port='0'/>
</console>
<input type='tablet' bus='usb'/>
<input type='mouse' bus='ps2'/>
<graphics type='vnc' port='-1' autoport='yes'/>
<graphics type='spice' port='-1' autoport='yes'/>
<video>
<model type='cirrus' vram='9216' heads='1'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x02'
function='0x0'/>
</video>
<memballoon model='virtio'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x04'
function='0x0'/>
</memballoon>
</devices>
</domain>''' % {'name': self._def['name'],
'uuid': self._def['uuid'],
'memory': self._def['memory'],
'vcpu': self._def['vcpu'],
'arch': self._def['os']['arch'],
'disks': disks,
'nics': nics}
def managedSave(self, flags):
self._connection._mark_not_running(self)
self._has_saved_state = True
def managedSaveRemove(self, flags):
self._has_saved_state = False
def hasManagedSaveImage(self, flags):
return int(self._has_saved_state)
def resume(self):
self._state = VIR_DOMAIN_RUNNING
def snapshotCreateXML(self, xml, flags):
tree = etree.fromstring(xml)
name = tree.find('./name').text
snapshot = DomainSnapshot(name, self)
self._snapshots[name] = snapshot
return snapshot
def vcpus(self):
vcpus = ([], [])
for i in range(0, self._def['vcpu']):
vcpus[0].append((i, 1, 120405, i))
vcpus[1].append((True, True, True, True))
return vcpus
def memoryStats(self):
return {}
def maxMemory(self):
return self._def['memory']
def blockJobInfo(self, disk, flags):
return {}
def blockJobAbort(self, disk, flags):
pass
def blockResize(self, disk, size):
pass
def blockRebase(self, disk, base, bandwidth=0, flags=0):
if (not base) and (flags and VIR_DOMAIN_BLOCK_REBASE_RELATIVE):
raise make_libvirtError(
libvirtError,
'flag VIR_DOMAIN_BLOCK_REBASE_RELATIVE is '
'valid only with non-null base',
error_code=VIR_ERR_INVALID_ARG,
error_domain=VIR_FROM_QEMU)
return 0
def blockCommit(self, disk, base, top, flags):
return 0
def jobInfo(self):
# NOTE(danms): This is an array of 12 integers, so just report
# something to avoid an IndexError if we look at this
return [0] * 12
def jobStats(self, flags=0):
return {}
def injectNMI(self, flags=0):
return 0
def abortJob(self):
pass
def fsFreeze(self):
pass
def fsThaw(self):
pass
class DomainSnapshot(object):
def __init__(self, name, domain):
self._name = name
self._domain = domain
def delete(self, flags):
del self._domain._snapshots[self._name]
class Connection(object):
def __init__(self, uri=None, readonly=False, version=FAKE_LIBVIRT_VERSION,
hv_version=FAKE_QEMU_VERSION, host_info=None):
if not uri or uri == '':
if allow_default_uri_connection:
uri = 'qemu:///session'
else:
raise ValueError("URI was None, but fake libvirt is "
"configured to not accept this.")
uri_whitelist = ['qemu:///system',
'qemu:///session',
'lxc:///', # from LibvirtDriver._uri()
'xen:///', # from LibvirtDriver._uri()
'uml:///system',
'test:///default',
'parallels:///system']
if uri not in uri_whitelist:
raise make_libvirtError(
libvirtError,
"libvirt error: no connection driver "
"available for No connection for URI %s" % uri,
error_code=5, error_domain=0)
self.readonly = readonly
self._uri = uri
self._vms = {}
self._running_vms = {}
self._id_counter = 1 # libvirt reserves 0 for the hypervisor.
self._nwfilters = {}
self._nodedevs = {}
self._event_callbacks = {}
self.fakeLibVersion = version
self.fakeVersion = hv_version
self.host_info = host_info or HostInfo()
def _add_filter(self, nwfilter):
self._nwfilters[nwfilter._name] = nwfilter
def _remove_filter(self, nwfilter):
del self._nwfilters[nwfilter._name]
def _add_nodedev(self, nodedev):
self._nodedevs[nodedev._name] = nodedev
def _remove_nodedev(self, nodedev):
del self._nodedevs[nodedev._name]
def _mark_running(self, dom):
self._running_vms[self._id_counter] = dom
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STARTED, 0)
self._id_counter += 1
def _mark_not_running(self, dom):
if dom._transient:
self._undefine(dom)
dom._id = -1
for (k, v) in six.iteritems(self._running_vms):
if v == dom:
del self._running_vms[k]
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STOPPED, 0)
return
def _undefine(self, dom):
del self._vms[dom.name()]
if not dom._transient:
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_UNDEFINED, 0)
def getInfo(self):
return [self.host_info.arch,
self.host_info.kB_mem,
self.host_info.cpus,
self.host_info.cpu_mhz,
self.host_info.cpu_nodes,
self.host_info.cpu_sockets,
self.host_info.cpu_cores,
self.host_info.cpu_threads]
def numOfDomains(self):
return len(self._running_vms)
def listDomainsID(self):
return list(self._running_vms.keys())
def lookupByID(self, id):
if id in self._running_vms:
return self._running_vms[id]
raise make_libvirtError(
libvirtError,
'Domain not found: no domain with matching id %d' % id,
error_code=VIR_ERR_NO_DOMAIN,
error_domain=VIR_FROM_QEMU)
def lookupByName(self, name):
if name in self._vms:
return self._vms[name]
raise make_libvirtError(
libvirtError,
'Domain not found: no domain with matching name "%s"' % name,
error_code=VIR_ERR_NO_DOMAIN,
error_domain=VIR_FROM_QEMU)
def listAllDomains(self, flags):
vms = []
        # self._vms maps name -> Domain, so iterate the Domain values
        for vm in self._vms.values():
            if flags & VIR_CONNECT_LIST_DOMAINS_ACTIVE:
                if vm._state != VIR_DOMAIN_SHUTOFF:
                    vms.append(vm)
            if flags & VIR_CONNECT_LIST_DOMAINS_INACTIVE:
                if vm._state == VIR_DOMAIN_SHUTOFF:
                    vms.append(vm)
vms.append(vm)
return vms
def _emit_lifecycle(self, dom, event, detail):
if VIR_DOMAIN_EVENT_ID_LIFECYCLE not in self._event_callbacks:
return
cbinfo = self._event_callbacks[VIR_DOMAIN_EVENT_ID_LIFECYCLE]
callback = cbinfo[0]
opaque = cbinfo[1]
callback(self, dom, event, detail, opaque)
def defineXML(self, xml):
dom = Domain(connection=self, running=False, transient=False, xml=xml)
self._vms[dom.name()] = dom
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_DEFINED, 0)
return dom
def createXML(self, xml, flags):
dom = Domain(connection=self, running=True, transient=True, xml=xml)
self._vms[dom.name()] = dom
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STARTED, 0)
return dom
def getType(self):
if self._uri == 'qemu:///system':
return 'QEMU'
def getLibVersion(self):
return self.fakeLibVersion
def getVersion(self):
return self.fakeVersion
def getHostname(self):
return 'compute1'
def domainEventRegisterAny(self, dom, eventid, callback, opaque):
self._event_callbacks[eventid] = [callback, opaque]
def registerCloseCallback(self, cb, opaque):
pass
def getCPUMap(self):
"""Return calculated CPU map from HostInfo, by default showing 2
online CPUs.
"""
active_cpus = self.host_info.cpus
total_cpus = active_cpus + len(self.host_info.disabled_cpus_list)
cpu_map = [True if cpu_num not in self.host_info.disabled_cpus_list
else False for cpu_num in range(total_cpus)]
return (total_cpus, cpu_map, active_cpus)
def getCapabilities(self):
"""Return spoofed capabilities."""
numa_topology = self.host_info.get_numa_topology()
if isinstance(numa_topology, vconfig.LibvirtConfigCapsNUMATopology):
numa_topology = numa_topology.to_xml()
return '''<capabilities>
<host>
<uuid>cef19ce0-0ca2-11df-855d-b19fbce37686</uuid>
<cpu>
<arch>x86_64</arch>
<model>Penryn</model>
<vendor>Intel</vendor>
<topology sockets='%(sockets)s' cores='%(cores)s' threads='%(threads)s'/>
<feature name='xtpr'/>
<feature name='tm2'/>
<feature name='est'/>
<feature name='vmx'/>
<feature name='ds_cpl'/>
<feature name='monitor'/>
<feature name='pbe'/>
<feature name='tm'/>
<feature name='ht'/>
<feature name='ss'/>
<feature name='acpi'/>
<feature name='ds'/>
<feature name='vme'/>
</cpu>
<migration_features>
<live/>
<uri_transports>
<uri_transport>tcp</uri_transport>
</uri_transports>
</migration_features>
%(topology)s
<secmodel>
<model>apparmor</model>
<doi>0</doi>
</secmodel>
</host>
<guest>
<os_type>hvm</os_type>
<arch name='i686'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
<domain type='qemu'>
</domain>
<domain type='kvm'>
<emulator>/usr/bin/kvm</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
</domain>
</arch>
<features>
<cpuselection/>
<deviceboot/>
<pae/>
<nonpae/>
<acpi default='on' toggle='yes'/>
<apic default='on' toggle='no'/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='x86_64'>
<wordsize>64</wordsize>
<emulator>/usr/bin/qemu-system-x86_64</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
<domain type='qemu'>
</domain>
<domain type='kvm'>
<emulator>/usr/bin/kvm</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
</domain>
</arch>
<features>
<cpuselection/>
<deviceboot/>
<acpi default='on' toggle='yes'/>
<apic default='on' toggle='no'/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='armv7l'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-arm</emulator>
<machine>integratorcp</machine>
<machine>vexpress-a9</machine>
<machine>syborg</machine>
<machine>musicpal</machine>
<machine>mainstone</machine>
<machine>n800</machine>
<machine>n810</machine>
<machine>n900</machine>
<machine>cheetah</machine>
<machine>sx1</machine>
<machine>sx1-v1</machine>
<machine>beagle</machine>
<machine>beaglexm</machine>
<machine>tosa</machine>
<machine>akita</machine>
<machine>spitz</machine>
<machine>borzoi</machine>
<machine>terrier</machine>
<machine>connex</machine>
<machine>verdex</machine>
<machine>lm3s811evb</machine>
<machine>lm3s6965evb</machine>
<machine>realview-eb</machine>
<machine>realview-eb-mpcore</machine>
<machine>realview-pb-a8</machine>
<machine>realview-pbx-a9</machine>
<machine>versatilepb</machine>
<machine>versatileab</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='mips'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-mips</emulator>
<machine>malta</machine>
<machine>mipssim</machine>
<machine>magnum</machine>
<machine>pica61</machine>
<machine>mips</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='mipsel'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-mipsel</emulator>
<machine>malta</machine>
<machine>mipssim</machine>
<machine>magnum</machine>
<machine>pica61</machine>
<machine>mips</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='sparc'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-sparc</emulator>
<machine>SS-5</machine>
<machine>leon3_generic</machine>
<machine>SS-10</machine>
<machine>SS-600MP</machine>
<machine>SS-20</machine>
<machine>Voyager</machine>
<machine>LX</machine>
<machine>SS-4</machine>
<machine>SPARCClassic</machine>
<machine>SPARCbook</machine>
<machine>SS-1000</machine>
<machine>SS-2000</machine>
<machine>SS-2</machine>
<domain type='qemu'>
</domain>
</arch>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='ppc'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-ppc</emulator>
<machine>g3beige</machine>
<machine>virtex-ml507</machine>
<machine>mpc8544ds</machine>
<machine canonical='bamboo-0.13'>bamboo</machine>
<machine>bamboo-0.13</machine>
<machine>bamboo-0.12</machine>
<machine>ref405ep</machine>
<machine>taihu</machine>
<machine>mac99</machine>
<machine>prep</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
</capabilities>''' % {'sockets': self.host_info.cpu_sockets,
'cores': self.host_info.cpu_cores,
'threads': self.host_info.cpu_threads,
'topology': numa_topology}
def compareCPU(self, xml, flags):
tree = etree.fromstring(xml)
arch_node = tree.find('./arch')
if arch_node is not None:
if arch_node.text not in [arch.X86_64,
arch.I686]:
return VIR_CPU_COMPARE_INCOMPATIBLE
model_node = tree.find('./model')
if model_node is not None:
if model_node.text != self.host_info.cpu_model:
return VIR_CPU_COMPARE_INCOMPATIBLE
vendor_node = tree.find('./vendor')
if vendor_node is not None:
if vendor_node.text != self.host_info.cpu_vendor:
return VIR_CPU_COMPARE_INCOMPATIBLE
# The rest of the stuff libvirt implements is rather complicated
# and I don't think it adds much value to replicate it here.
return VIR_CPU_COMPARE_IDENTICAL
def getCPUStats(self, cpuNum, flag):
if cpuNum < 2:
return {'kernel': 5664160000000,
'idle': 1592705190000000,
'user': 26728850000000,
'iowait': 6121490000000}
else:
raise make_libvirtError(
libvirtError,
"invalid argument: Invalid cpu number",
error_code=VIR_ERR_INTERNAL_ERROR,
error_domain=VIR_FROM_QEMU)
def nwfilterLookupByName(self, name):
try:
return self._nwfilters[name]
except KeyError:
raise make_libvirtError(
libvirtError,
"no nwfilter with matching name %s" % name,
error_code=VIR_ERR_NO_NWFILTER,
error_domain=VIR_FROM_NWFILTER)
def nwfilterDefineXML(self, xml):
nwfilter = NWFilter(self, xml)
self._add_filter(nwfilter)
def nodeDeviceLookupByName(self, name):
try:
return self._nodedevs[name]
except KeyError:
raise make_libvirtError(
libvirtError,
"no nodedev with matching name %s" % name,
error_code=VIR_ERR_NO_NODE_DEVICE,
error_domain=VIR_FROM_NODEDEV)
def listDefinedDomains(self):
return []
def listDevices(self, cap, flags):
return []
def baselineCPU(self, cpu, flag):
"""Add new libvirt API."""
return """<cpu mode='custom' match='exact'>
<model>Penryn</model>
<vendor>Intel</vendor>
<feature name='xtpr'/>
<feature name='tm2'/>
<feature name='est'/>
<feature name='vmx'/>
<feature name='ds_cpl'/>
<feature name='monitor'/>
<feature name='pbe'/>
<feature name='tm'/>
<feature name='ht'/>
<feature name='ss'/>
<feature name='acpi'/>
<feature name='ds'/>
<feature name='vme'/>
<feature policy='require' name='aes'/>
</cpu>"""
def secretLookupByUsage(self, usage_type_obj, usage_id):
pass
def secretDefineXML(self, xml):
pass
def openAuth(uri, auth, flags=0):
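    # 'auth' follows the libvirt openAuth convention, e.g. (sketch):
    #   [[VIR_CRED_AUTHNAME, VIR_CRED_PASSPHRASE], request_credentials_fn, None]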
if type(auth) != list:
raise Exception("Expected a list for 'auth' parameter")
if type(auth[0]) != list:
raise Exception("Expected a function in 'auth[0]' parameter")
if not callable(auth[1]):
raise Exception("Expected a function in 'auth[1]' parameter")
return Connection(uri, (flags == VIR_CONNECT_RO))
def virEventRunDefaultImpl():
time.sleep(1)
def virEventRegisterDefaultImpl():
if connection_used:
raise Exception("virEventRegisterDefaultImpl() must be "
"called before connection is used.")
def registerErrorHandler(handler, ctxt):
pass
def make_libvirtError(error_class, msg, error_code=None,
error_domain=None, error_message=None,
error_level=None, str1=None, str2=None, str3=None,
int1=None, int2=None):
"""Convenience function for creating `libvirtError` exceptions which
allow you to specify arguments in constructor without having to manipulate
the `err` tuple directly.
We need to pass in `error_class` to this function because it may be
`libvirt.libvirtError` or `fakelibvirt.libvirtError` depending on whether
`libvirt-python` is installed.
"""
exc = error_class(msg)
exc.err = (error_code, error_domain, error_message, error_level,
str1, str2, str3, int1, int2)
return exc
virDomain = Domain
virNodeDevice = NodeDevice
virConnect = Connection
class FakeLibvirtFixture(fixtures.Fixture):
"""Performs global setup/stubbing for all libvirt tests.
"""
def setUp(self):
super(FakeLibvirtFixture, self).setUp()
disable_event_thread(self)
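# Typical test usage sketch (useFixture comes from a fixtures-based TestCase):
#   self.useFixture(FakeLibvirtFixture())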
| [
"[email protected]"
] | |
e58665ff3fd90ce5824d7791abbdc9f0b0b97237 | b454c3af46b3c495d298bf4f4554718b3ca0f7cb | /_syntax/number.py | c387793f6f2c35935d9a986f5ad9d77d7dd285ee | [] | no_license | jaelyangChoi/CodingTest | fd9bab94cbcad0308631d2b663acd07b87216f6c | 8d81035f04a05fcfc8ccdafe8627800648bc73cc | refs/heads/master | 2023-06-01T23:05:38.521250 | 2021-06-19T03:53:20 | 2021-06-19T03:53:20 | 339,958,839 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | a = 1e9 # 1억
print(int(a))  # 1000000000 (note: 1e9 is one billion)
pi = 314e-2
print(pi)
# Floats carry rounding error: binary cannot represent every decimal fraction exactly.
a = .3 + .6
print(a) # 0.8999999999999999
# => round it off with round()
print(round(a, 1)) # 0.9
print(round(3.1423, 2)) # 3.14
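# For exact decimal arithmetic, the standard-library decimal module avoids the
# binary-float error shown above (minimal sketch):
from decimal import Decimal
print(Decimal('0.3') + Decimal('0.6'))  # 0.9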
| [
"[email protected]"
] | |
0432977e460d9740d98c78b2f667853a927bfa88 | 1e15b421142c26e5285d16a58ac6aca0b062cbef | /venv/Scripts/django-admin.py | bfe0e1fb72c8c2509cf247d49ece44c756696536 | [] | no_license | subha3495/subhacookiesproj | c2d49a5eb154aba27bfc99d194cee49efb8e89c3 | 3068f104dc83c76c81bd7a227c790f3f4d4d1b0f | refs/heads/master | 2020-05-17T03:50:07.209409 | 2019-04-25T18:55:32 | 2019-04-25T18:55:32 | 183,492,017 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 180 | py | #!C:\Users\subha\PycharmProjects\subhacookiesproj\venv\Scripts\python.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| [
"[email protected]"
] | |
6c078604796dd123c75509a7767be4cb0dd37108 | a978d41e3754a2ece3d649888d7dc549b319817e | /apps/store/migrations/0002_auto_20171026_0244.py | a1a36f76d7053b9e39316b910dd3d5eb0ebab4e4 | [] | no_license | Ericksmith/board_games | ace0ca4d04b4eb508114c8c43d14ff3f5322b9f8 | 4c8a41f9e7bac8dfc59d1cba35c8c10b345d2e57 | refs/heads/master | 2021-09-05T07:15:23.888922 | 2018-01-25T04:39:24 | 2018-01-25T04:39:24 | 108,183,604 | 0 | 0 | null | 2018-01-25T04:39:24 | 2017-10-24T21:05:31 | Python | UTF-8 | Python | false | false | 434 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-26 02:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('store', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='game',
name='publisher',
field=models.SmallIntegerField(),
),
]
| [
"[email protected]"
] | |
8179ed536b854e23b05d411c6409878d9f8fbeef | 599069eeeae294950aab730ca8d4858ac1929a5c | /bemani/backend/bishi/base.py | d7ace86131c74ce846cf138365fdb3868d06b493 | [] | no_license | ByteFun/bemaniutils | 232d057d4b548f929af4da4f145565ad51482113 | bd467a9b732a25a1c8aba75106dc459fbdff61b0 | refs/heads/master | 2020-12-04T07:45:45.503620 | 2019-12-08T21:57:08 | 2019-12-08T21:57:08 | 231,683,196 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 718 | py | # vim: set fileencoding=utf-8
from typing import Optional
from bemani.backend.base import Base
from bemani.backend.core import CoreHandler, CardManagerHandler, PASELIHandler
from bemani.common import GameConstants
class BishiBashiBase(CoreHandler, CardManagerHandler, PASELIHandler, Base):
"""
Base game class for all one Bishi Bashi version that we support (lol).
In theory we could add support for Bishi Bashi Channel, but that never
happened.
"""
game = GameConstants.BISHI_BASHI
def previous_version(self) -> Optional['BishiBashiBase']:
"""
Returns the previous version of the game, based on this game. Should
be overridden.
"""
return None
| [
"[email protected]"
] | |
d1f7f4d7645b9e48ae6990d58072c273a9c73196 | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /all-gists/c703e319a1c79cd92f373c6c99dd4a40/snippet.py | f0eb1096bf3bccd9d42dd6d8a64c2f5536dbc3d2 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 446 | py | people = 30
cars = 40
buses = 15
if cars > people:
print("we should take the cars.")
elif cars < people:
print("we should not take cars.")
else:
print("we can't decide.")
if buses > cars:
print("too many buses.")
elif buses < cars:
print("maybe we could take the buses.")
else:
print("we still can't decide.")
if people > buses:
print("ok, let's take the buses.")
else:
print("fine, let's stay at home then.")
| [
"[email protected]"
] | |
ad553c348469b6a023c2b222cff54e6aaa95712d | 6ff318a9f67a3191b2a9f1d365b275c2d0e5794f | /python/day8/add.py | e060b151f7e7662949c0265338cee9b7d3e34c06 | [] | no_license | lvhanzhi/Python | c1846cb83660d60a55b0f1d2ed299bc0632af4ba | c89f882f601898b5caab25855ffa7d7a1794f9ab | refs/heads/master | 2020-03-25T23:34:00.919197 | 2018-09-13T12:19:51 | 2018-09-13T12:19:51 | 144,281,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | with open('test.txt','a',encoding='utf8')as f:
f.write('哈哈哈,打不过我吧,没有办法\n') | [
"[email protected]"
] | |
9dc8f2e6c9f60331c1c4894e1182c096842a971a | f9a587ffcc42e06294f12ac761bcc589ba202f85 | /lesson04/del_r.py | 01a034a4317736930147e9cc21cb34e12acce1b3 | [] | no_license | Bulgakoff/files_utf8_04 | 3c49d5f8b637eaa834dd99fd6f140f37ebf96794 | a6299e9c088812c63e1967290a97a2798bc19d1c | refs/heads/master | 2020-09-16T09:10:00.268684 | 2019-11-27T14:58:14 | 2019-11-27T14:58:14 | 223,722,605 | 0 | 0 | null | 2019-11-27T14:58:15 | 2019-11-24T09:51:44 | Python | UTF-8 | Python | false | false | 158 | py |
with open('del.txt', 'rb') as f:
result = f.readlines()
print(result)
f_var = []
# decode each raw byte line from UTF-8 into str
for p in result:
    f_var.append(p.decode('utf-8'))
print(f_var) | [
"[email protected]"
] | |
454704723c91aaa38436e984d0a865a2c976bf16 | 45f93a9d47204d76b8bf25a71dfb79403e75c33c | /Trees_and_Graphs/Binary Trees/__Right-Sibling-Tree.py | 5274bf5cf821019dce12066fb96d3cb3d3954489 | [] | no_license | tahmid-tanzim/problem-solving | 0173bce1973ac3e95441a76c10324c0e1b0a57c3 | 6ddb51de6772130f209474e76f39ca2938f444f0 | refs/heads/master | 2023-06-25T02:18:03.690263 | 2023-06-20T06:58:46 | 2023-06-20T06:58:46 | 137,173,850 | 4 | 1 | null | 2022-03-30T08:28:41 | 2018-06-13T06:44:25 | Python | UTF-8 | Python | false | false | 1,842 | py | #!/usr/bin/python3
# https://www.algoexpert.io/questions/Right%20Sibling%20Tree
"""
Write a function that takes in a Binary Tree, transforms it into a Right Sibling Tree, and returns its root.
A Right Sibling Tree is obtained by making every node in a Binary Tree have
its right property point to its right sibling instead of its
right child. A node's right sibling is the node immediately to its right on
the same level or None / null if there is no node immediately to its right.
Note that once the transformation is complete, some nodes might no longer have
a node pointing to them. For example, in the sample output below, the node
with value 10 no longer has any inbound pointers and is effectively unreachable.
The transformation should be done in place, meaning that the original data
structure should be mutated (no new structure should be created).
Each BinaryTree node has an integer value, a
left child node, and a right child node. Children
nodes can either be BinaryTree nodes themselves or None / null.
Sample Input
tree = 1
/ \
2 3
/ \ / \
4 5 6 7
/ \ \ / / \
8 9 10 11 12 13
/
14
Sample Output
1 // the root node with value 1
/
2-----------3
/ /
4-----5-----6-----7
/ / /
8---9 10-11 12-13 // the node with value 10 no longer has a node pointing to it
/
14
"""
class BinaryTree:
def __init__(self, value, left=None, right=None):
self.value = value
self.left = left
self.right = right
# O(n) time | O(d) space
# where n is the number of nodes in the Binary Tree and d is the depth (height) of the Binary Tree
def rightSiblingTree(root):
    # Depth-first mutation. For every node we grab both children before any
    # pointer is overwritten, recurse into the left subtree first (while
    # parent.right still carries the sibling information we need), then
    # repoint node.right at the node's right sibling, then recurse right.
    def mutate(node, parent, isLeftChild):
        if node is None:
            return
        left, right = node.left, node.right
        mutate(left, node, True)
        if parent is None:
            node.right = None
        elif isLeftChild:
            node.right = parent.right
        elif parent.right is None:
            node.right = None
        else:
            node.right = parent.right.left
        mutate(right, node, False)

    mutate(root, None, None)
    return root
if __name__ == "__main__":
pass
| [
"[email protected]"
] | |
5b49f9ed2760916b19bd8f252a154986087ef41b | 23b686feb2d0ab9082a7ce622fc055946ed99c55 | /.history/atkd/views_20190409152250.py | 368f52282a2afbb37d8b96dd4a13e80011e77a5e | [] | no_license | jasvr/atkd | a18b9840bf9948a7560684cd5eb0d5e22f6c52c7 | daf61f7aa11cfc812171298894b1d0019641c4bd | refs/heads/master | 2020-05-07T09:35:56.343837 | 2019-04-12T16:17:09 | 2019-04-12T16:17:09 | 180,383,260 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | from django.shortcuts import render
from .models import Parent, Student
def parent_list(request):
parents = Parent.objects.all()
return render(request) | [
"[email protected]"
] | |
5485a970566a26513ba7761fcf092dac48138d02 | ee561aa019a80f621007f82bdb21fe6ed8b6278f | /devel/turtlebot3-melodic-devel/turtlebot3_description/catkin_generated/pkg.installspace.context.pc.py | b595bae6cb52a86f7d280558d437f0187c648e65 | [] | no_license | allanwhledu/agv_edu_prj | 4fb5fbf14cf0a14edd57ee9bd87903dc25d4d4f2 | 643a8a96ca7027529332f25208350de78c07e33d | refs/heads/master | 2020-09-23T23:32:54.430035 | 2019-12-04T07:47:55 | 2019-12-04T07:47:55 | 225,613,426 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "urdf;xacro".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "turtlebot3_description"
PROJECT_SPACE_DIR = "/usr/local"
PROJECT_VERSION = "1.2.2"
| [
"[email protected]"
] | |
8525100e60ff3148ec5d2892294ea3ed84fb74f7 | b0cc5920a18d31bc22d346ae10e99e6b78b12b32 | /wsgi/zosiaproject/agenda/views.py | 4132c02e6a381bf1a3b0baeaae4c4ff632a0ba79 | [] | no_license | kamarkiewicz/zosiaproject | de33525b786908585edd03dfb235c67031bcd440 | 440316ae1dea4feff5b6a9ac6f40c19382022d91 | refs/heads/master | 2021-01-17T09:51:34.944777 | 2016-01-03T21:06:58 | 2016-01-03T22:07:47 | 41,800,412 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | from django.views.generic.base import TemplateView
from django.utils import timezone
from .models import Agenda
class AgendaView(TemplateView):
template_name = 'agenda.html'
def get_context_data(self, **kwargs):
context = super(AgendaView, self).get_context_data(**kwargs)
context['agenda'] = Agenda.objects \
.filter(pub_date__lte=timezone.now()).first()
return context
| [
"[email protected]"
] | |
343654a6ff8b9ad62dfae3413bb5577c2026f7eb | 5966449d2e29c9b64351895db2932f94f9de42da | /catkin_ws/build/detection/catkin_generated/pkg.installspace.context.pc.py | d0f2a9e6ba34e48a5a09cbb48f107f358d311951 | [] | no_license | godaeseong/GoHriProject | 8cbce6934485b8ba3253fc7b6c5b5b59397b4518 | 425e70b7c91b6215f5477fc2250d2b0ac96577be | refs/heads/master | 2021-05-11T22:11:56.099580 | 2018-01-15T02:20:43 | 2018-01-15T02:20:43 | 117,484,817 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,577 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/hri/catkin_ws/install/include;/opt/ros/kinetic/include/opencv-3.3.1;/opt/ros/kinetic/include/opencv-3.3.1/opencv".split(';') if "/home/hri/catkin_ws/install/include;/opt/ros/kinetic/include/opencv-3.3.1;/opt/ros/kinetic/include/opencv-3.3.1/opencv" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;pcl_ros;pcl_conversions;opt_utils;opt_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-ldetection;-lopt_utils;/opt/ros/kinetic/lib/libopencv_calib3d3.so;/opt/ros/kinetic/lib/libopencv_core3.so;/opt/ros/kinetic/lib/libopencv_dnn3.so;/opt/ros/kinetic/lib/libopencv_features2d3.so;/opt/ros/kinetic/lib/libopencv_flann3.so;/opt/ros/kinetic/lib/libopencv_highgui3.so;/opt/ros/kinetic/lib/libopencv_imgcodecs3.so;/opt/ros/kinetic/lib/libopencv_imgproc3.so;/opt/ros/kinetic/lib/libopencv_ml3.so;/opt/ros/kinetic/lib/libopencv_objdetect3.so;/opt/ros/kinetic/lib/libopencv_photo3.so;/opt/ros/kinetic/lib/libopencv_shape3.so;/opt/ros/kinetic/lib/libopencv_stitching3.so;/opt/ros/kinetic/lib/libopencv_superres3.so;/opt/ros/kinetic/lib/libopencv_video3.so;/opt/ros/kinetic/lib/libopencv_videoio3.so;/opt/ros/kinetic/lib/libopencv_videostab3.so;/opt/ros/kinetic/lib/libopencv_viz3.so;/opt/ros/kinetic/lib/libopencv_aruco3.so;/opt/ros/kinetic/lib/libopencv_bgsegm3.so;/opt/ros/kinetic/lib/libopencv_bioinspired3.so;/opt/ros/kinetic/lib/libopencv_ccalib3.so;/opt/ros/kinetic/lib/libopencv_cvv3.so;/opt/ros/kinetic/lib/libopencv_datasets3.so;/opt/ros/kinetic/lib/libopencv_dpm3.so;/opt/ros/kinetic/lib/libopencv_face3.so;/opt/ros/kinetic/lib/libopencv_fuzzy3.so;/opt/ros/kinetic/lib/libopencv_hdf3.so;/opt/ros/kinetic/lib/libopencv_img_hash3.so;/opt/ros/kinetic/lib/libopencv_line_descriptor3.so;/opt/ros/kinetic/lib/libopencv_optflow3.so;/opt/ros/kinetic/lib/libopencv_phase_unwrapping3.so;/opt/ros/kinetic/lib/libopencv_plot3.so;/opt/ros/kinetic/lib/libopencv_reg3.so;/opt/ros/kinetic/lib/libopencv_rgbd3.so;/opt/ros/kinetic/lib/libopencv_saliency3.so;/opt/ros/kinetic/lib/libopencv_stereo3.so;/opt/ros/kinetic/lib/libopencv_structured_light3.so;/opt/ros/kinetic/lib/libopencv_surface_matching3.so;/opt/ros/kinetic/lib/libopencv_text3.so;/opt/ros/kinetic/lib/libopencv_tracking3.so;/opt/ros/kinetic/lib/libopencv_xfeatures2d3.so;/opt/ros/kinetic/lib/libopencv_ximgproc3.so;/opt/ros/kinetic/lib/libopencv_xobjdetect3.so;/opt/ros/kinetic/lib/libopencv_xphoto3.so".split(';') if 
"-ldetection;-lopt_utils;/opt/ros/kinetic/lib/libopencv_calib3d3.so;/opt/ros/kinetic/lib/libopencv_core3.so;/opt/ros/kinetic/lib/libopencv_dnn3.so;/opt/ros/kinetic/lib/libopencv_features2d3.so;/opt/ros/kinetic/lib/libopencv_flann3.so;/opt/ros/kinetic/lib/libopencv_highgui3.so;/opt/ros/kinetic/lib/libopencv_imgcodecs3.so;/opt/ros/kinetic/lib/libopencv_imgproc3.so;/opt/ros/kinetic/lib/libopencv_ml3.so;/opt/ros/kinetic/lib/libopencv_objdetect3.so;/opt/ros/kinetic/lib/libopencv_photo3.so;/opt/ros/kinetic/lib/libopencv_shape3.so;/opt/ros/kinetic/lib/libopencv_stitching3.so;/opt/ros/kinetic/lib/libopencv_superres3.so;/opt/ros/kinetic/lib/libopencv_video3.so;/opt/ros/kinetic/lib/libopencv_videoio3.so;/opt/ros/kinetic/lib/libopencv_videostab3.so;/opt/ros/kinetic/lib/libopencv_viz3.so;/opt/ros/kinetic/lib/libopencv_aruco3.so;/opt/ros/kinetic/lib/libopencv_bgsegm3.so;/opt/ros/kinetic/lib/libopencv_bioinspired3.so;/opt/ros/kinetic/lib/libopencv_ccalib3.so;/opt/ros/kinetic/lib/libopencv_cvv3.so;/opt/ros/kinetic/lib/libopencv_datasets3.so;/opt/ros/kinetic/lib/libopencv_dpm3.so;/opt/ros/kinetic/lib/libopencv_face3.so;/opt/ros/kinetic/lib/libopencv_fuzzy3.so;/opt/ros/kinetic/lib/libopencv_hdf3.so;/opt/ros/kinetic/lib/libopencv_img_hash3.so;/opt/ros/kinetic/lib/libopencv_line_descriptor3.so;/opt/ros/kinetic/lib/libopencv_optflow3.so;/opt/ros/kinetic/lib/libopencv_phase_unwrapping3.so;/opt/ros/kinetic/lib/libopencv_plot3.so;/opt/ros/kinetic/lib/libopencv_reg3.so;/opt/ros/kinetic/lib/libopencv_rgbd3.so;/opt/ros/kinetic/lib/libopencv_saliency3.so;/opt/ros/kinetic/lib/libopencv_stereo3.so;/opt/ros/kinetic/lib/libopencv_structured_light3.so;/opt/ros/kinetic/lib/libopencv_surface_matching3.so;/opt/ros/kinetic/lib/libopencv_text3.so;/opt/ros/kinetic/lib/libopencv_tracking3.so;/opt/ros/kinetic/lib/libopencv_xfeatures2d3.so;/opt/ros/kinetic/lib/libopencv_ximgproc3.so;/opt/ros/kinetic/lib/libopencv_xobjdetect3.so;/opt/ros/kinetic/lib/libopencv_xphoto3.so" != "" else []
PROJECT_NAME = "detection"
PROJECT_SPACE_DIR = "/home/hri/catkin_ws/install"
PROJECT_VERSION = "0.0.1"
| [
"[email protected]"
] | |
35fbfc803b4c58ed3877c42a9dea8ca8ed8cce89 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_180/ch39_2019_09_04_15_02_03_498630.py | 0029d8ac133813a2cedbf0938fddc211d09ee1cd | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 144 | py | soma = 0
while True:
numa = int(input("Numeros para somar: "))
soma = soma + numa
if numa == 0:
break
print(soma)
| [
"[email protected]"
] | |
0f677f5d072eb753b88fe94e2feeead2bb50a595 | f714db4463dd37fc33382364dc4b1963a9053e49 | /src/sentry/analytics/events/first_transaction_sent.py | d972abeae1561c5b7482283e597056d244743902 | [
"BUSL-1.1",
"Apache-2.0"
] | permissive | macher91/sentry | 92171c2ad23564bf52627fcd711855685b138cbd | dd94d574403c95eaea6d4ccf93526577f3d9261b | refs/heads/master | 2021-07-07T08:23:53.339912 | 2020-07-21T08:03:55 | 2020-07-21T08:03:55 | 140,079,930 | 0 | 0 | BSD-3-Clause | 2020-05-13T11:28:35 | 2018-07-07T11:50:48 | Python | UTF-8 | Python | false | false | 443 | py | from __future__ import absolute_import
from sentry import analytics
class FirstTransactionSentEvent(analytics.Event):
type = "first_transaction.sent"
attributes = (
analytics.Attribute("organization_id"),
analytics.Attribute("project_id"),
analytics.Attribute("platform", required=False),
analytics.Attribute("default_user_id", required=False),
)
analytics.register(FirstTransactionSentEvent)
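
# Usage sketch (an assumption for illustration, not part of this module):
# elsewhere in the codebase such an event is typically recorded by type name
# through the analytics helper; `project` is a hypothetical Project instance.
#
#   analytics.record(
#       "first_transaction.sent",
#       organization_id=project.organization_id,
#       project_id=project.id,
#       platform=project.platform,
#   )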
| [
"[email protected]"
] | |
d1865dd785d2765014d2581d2626fbe175aced68 | 049d1262acb5e0a0be1201a12b479a7a111cb6b9 | /jacob/bin/wheel | 3dc16ecb462f5c5b1a855b258c3c3b4aceb27f2e | [] | no_license | jaybenaim/day13-reinforcements | c579c908ac35abbe541431a85158f3c3b4ae55bf | d31e89d340465aff7e5f7917924f0ed64566a34a | refs/heads/master | 2020-06-22T01:53:54.259573 | 2019-07-19T17:33:42 | 2019-07-19T17:33:42 | 197,604,415 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | #!/Users/jay/bitmaker/projects/day13-oop/assignments/reinforcements/jacob/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from wheel.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
aeef4cd9ad9cf60858a571c69a45edf2eaedd352 | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/appengine/v1/google-cloud-appengine-v1-py/google/cloud/appengine_admin_v1/services/firewall/pagers.py | fb10d4619bc29c2163fff3b9ab11c3472f65e6f1 | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,937 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional
from google.cloud.appengine_admin_v1.types import appengine
from google.cloud.appengine_admin_v1.types import firewall
class ListIngressRulesPager:
"""A pager for iterating through ``list_ingress_rules`` requests.
This class thinly wraps an initial
:class:`google.cloud.appengine_admin_v1.types.ListIngressRulesResponse` object, and
provides an ``__iter__`` method to iterate through its
``ingress_rules`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListIngressRules`` requests and continue to iterate
through the ``ingress_rules`` field on the
corresponding responses.
All the usual :class:`google.cloud.appengine_admin_v1.types.ListIngressRulesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., appengine.ListIngressRulesResponse],
request: appengine.ListIngressRulesRequest,
response: appengine.ListIngressRulesResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.appengine_admin_v1.types.ListIngressRulesRequest):
The initial request object.
response (google.cloud.appengine_admin_v1.types.ListIngressRulesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = appengine.ListIngressRulesRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[appengine.ListIngressRulesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[firewall.FirewallRule]:
for page in self.pages:
yield from page.ingress_rules
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListIngressRulesAsyncPager:
"""A pager for iterating through ``list_ingress_rules`` requests.
This class thinly wraps an initial
:class:`google.cloud.appengine_admin_v1.types.ListIngressRulesResponse` object, and
provides an ``__aiter__`` method to iterate through its
``ingress_rules`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListIngressRules`` requests and continue to iterate
through the ``ingress_rules`` field on the
corresponding responses.
All the usual :class:`google.cloud.appengine_admin_v1.types.ListIngressRulesResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[appengine.ListIngressRulesResponse]],
request: appengine.ListIngressRulesRequest,
response: appengine.ListIngressRulesResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.appengine_admin_v1.types.ListIngressRulesRequest):
The initial request object.
response (google.cloud.appengine_admin_v1.types.ListIngressRulesResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = appengine.ListIngressRulesRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[appengine.ListIngressRulesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[firewall.FirewallRule]:
async def async_generator():
async for page in self.pages:
for response in page.ingress_rules:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
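
# Usage sketch (an assumption for illustration; `client` stands for a
# hypothetical Firewall client whose list_ingress_rules method returns one of
# the pagers above):
#
#   pager = client.list_ingress_rules(appengine.ListIngressRulesRequest(parent="apps/my-app"))
#   for rule in pager:              # follow-up pages are fetched transparently
#       print(rule.action, rule.source_range)
#
# The async variant is consumed with `async for rule in pager: ...` instead.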
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
47cc7185780e69d639c732ae362796e493f9cf77 | baaeb8c1d335e258fd49b5ef024ac39790fd660f | /backend/alembic_users/versions/ed860c399094_added_is_gold_evaluation_cols_to_jobs_.py | bab6dc2c44ecb1672d28025e6494b2f2c7d15e48 | [] | no_license | ReactARDev/React_Redux_Python | f0b80a9d2a603b38f8e144966bc899c5aa3690e6 | afdb4a55f82fdff86686ad955448a4168d05c739 | refs/heads/master | 2021-10-10T19:28:05.142652 | 2019-01-15T21:24:06 | 2019-01-15T21:24:06 | 159,198,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,349 | py | """added is_gold_evaluation cols to jobs and topic annotations
Revision ID: ed860c399094
Revises: 9528026442d1
Create Date: 2017-12-18 12:43:18.402875
"""
# revision identifiers, used by Alembic.
revision = 'ed860c399094'
down_revision = '9528026442d1'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('annotation_jobs', sa.Column('is_gold_evaluation', sa.Boolean(), nullable=True))
op.create_index(op.f('ix_annotation_jobs_is_gold_evaluation'), 'annotation_jobs', ['is_gold_evaluation'], unique=False)
op.add_column('topic_annotations', sa.Column('is_gold_evaluation', sa.Boolean(), nullable=True))
op.create_index(op.f('ix_topic_annotations_is_gold_evaluation'), 'topic_annotations', ['is_gold_evaluation'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_topic_annotations_is_gold_evaluation'), table_name='topic_annotations')
op.drop_column('topic_annotations', 'is_gold_evaluation')
op.drop_index(op.f('ix_annotation_jobs_is_gold_evaluation'), table_name='annotation_jobs')
op.drop_column('annotation_jobs', 'is_gold_evaluation')
# ### end Alembic commands ###
| [
"[email protected]"
] | |
223487d09c1e1f4d2f04e6c6301688924842dd39 | 2b6a02a34ee6bf68820ad185245e2609b296e0aa | /216.py | ddb1b4fc0c865ef4cd4c335fb5586c6825614993 | [] | no_license | shants/LeetCodePy | 948e505b6fcb0edcb9a1cf63a245b61d448d6e27 | 2337b5031d4dfe033a471cea8ab4aa5ab66122d0 | refs/heads/master | 2020-03-28T08:43:04.606044 | 2019-11-25T05:03:15 | 2019-11-25T05:03:15 | 147,984,830 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,491 | py | class Solution(object):
def __init__(self):
self.d = {}
# def is_solution(self, k,n,i, a):
# if n == 0 and len(a)==k:
# return True
# else:
# return False
#
# def process_solution(self, k,n,i,a):
# p = a[:]
# a = sorted(a, reverse=False)
# s = "".join([str(s) for s in a])
# self.d[s]= p
def bt(self, k,n,i, a):
if n < 0 :
return
if n!=0 and len(a)>=k:
return
if n==0 and k==len(a):
p = a[:]
a = sorted(a, reverse=False)
s = "".join([str(s) for s in a])
self.d[s] = p
else:
#l1 = [1,2,3,4,5,6,7,8,9]
s = set(a)
c = []
for i in range(1,10):
if i not in s:
c.append(i)
#c = [i for i in l1 if i not in a]
for j in range(len(c)):
a.append(c[j])
self.bt(k, n-c[j], i+1,a)
a.pop()
return
def combinationSum3(self, k, n):
"""
:type k: int
:type n: int
:rtype: List[List[int]]
"""
if n==0:
return [[]]
if n==1 and k==1:
return [[1]]
elif n ==1 and k!=1:
return []
a = []
self.bt(k,n,0, a)
return self.d.values()
if __name__ == "__main__":
s = Solution()
print(s.combinationSum3(3,9)) | [
"[email protected]"
] | |
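For reference, the driver above prints the combinations of three distinct digits from 1-9 summing to 9 (order may vary, since results come from dict values):

# s.combinationSum3(3, 9) -> [[1, 2, 6], [1, 3, 5], [2, 3, 4]]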
64a479d9e6ee5f31fe84d13d53432ef96d720493 | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_Difference/trend_LinearTrend/cycle_30/ar_/test_artificial_32_Difference_LinearTrend_30__0.py | 292b04ae191d6b03a01bd8034a851358326c8b29 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 271 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 30, transform = "Difference", sigma = 0.0, exog_count = 0, ar_order = 0); | [
"[email protected]"
] | |
fd8ba159c5468455865fc833112aeb035392e82b | a5e28d513cc29ca39d4b31d44585def1ee6d0ae9 | /tests/conftest.py | 4977ec1f1af76acf593f7155d01c3553166725d7 | [
"Apache-2.0"
] | permissive | blakev/ulid | 1e9c79d0acc3a82bcbf3c01601127e50d7ab15ce | 089c76595c15fd614a1ee0b989353079052abce5 | refs/heads/master | 2021-04-15T12:07:36.881868 | 2017-06-16T05:12:10 | 2017-06-16T05:12:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,928 | py | """
conftest
~~~~~~~~
High level fixtures used across multiple test modules.
"""
import calendar
import datetime
import os
import pytest
import random
from ulid import base32
@pytest.fixture(scope='function')
def ulid_bytes_year_1990(valid_bytes_80):
"""
Fixture that yields a :class:`~bytes` instance that represents a ULID with a timestamp
from the year 1990.
"""
return fixed_year_timestamp_bytes(1990, 1, 1) + valid_bytes_80
@pytest.fixture(scope='function')
def ulid_bytes_year_2000(valid_bytes_80):
"""
Fixture that yields a :class:`~bytes` instance that represents a ULID with a timestamp
from the year 2000.
"""
return fixed_year_timestamp_bytes(2000, 1, 1) + valid_bytes_80
@pytest.fixture(scope='function')
def valid_bytes_128():
"""
Fixture that yields :class:`~bytes` instances that are 128 bits, the length of an entire ULID.
"""
return random_bytes(16)
@pytest.fixture(scope='function')
def valid_bytes_80():
"""
Fixture that yields :class:`~bytes` instances that are 80 bits, the length of a ULID randomness.
"""
return random_bytes(10)
@pytest.fixture(scope='function')
def valid_bytes_48():
"""
Fixture that yields :class:`~bytes` instances that are 48 bits, the length of a ULID timestamp.
"""
return random_bytes(6)
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_128(request):
"""
Fixture that yields :class:`~bytes` instances that are between 0 and 256 bits, except 128.
"""
return random_bytes(request.param, not_in=[16])
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_80(request):
"""
Fixture that yields :class:`~bytes` instances that are between 0 and 256 bits, except 80.
"""
return random_bytes(request.param, not_in=[10])
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_48(request):
"""
Fixture that yields :class:`~bytes` instances that are between 0 and 256 bits, except 48.
"""
return random_bytes(request.param, not_in=[6])
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_bytes_48_80_128(request):
"""
Fixture that yields :class:`~bytes` instances that are between 0 and 256 bits, except 48, 80, and 128.
"""
return random_bytes(request.param, not_in=[6, 10, 16])
@pytest.fixture(scope='function')
def valid_str_26():
"""
Fixture that yields :class:`~str` instances that are 26 characters, the length of an entire ULID.
"""
return random_str(26)
@pytest.fixture(scope='function')
def valid_str_10():
"""
Fixture that yields :class:`~str` instances that are 10 characters, the length of a ULID timestamp.
"""
return random_str(10)
@pytest.fixture(scope='function')
def valid_str_16():
"""
Fixture that yields :class:`~str` instances that are 16 characters, the length of a ULID randomness.
"""
return random_str(16)
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_26(request):
"""
Fixture that yields :class:`~str` instances that are between 0 and 32 characters, except 26.
"""
return random_str(request.param, not_in=[26])
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_16(request):
"""
Fixture that yields :class:`~str` instances that are between 0 and 32 characters, except 16.
"""
return random_str(request.param, not_in=[16])
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_10(request):
"""
Fixture that yields :class:`~str` instances that are between 0 and 32 characters, except 10.
"""
return random_str(request.param, not_in=[10])
@pytest.fixture(scope='function', params=range(0, 32))
def invalid_str_10_16_26(request):
"""
Fixture that yields :class:`~str` instances that are between 0 and 32 characters, except 10, 16, and 26.
"""
return random_str(request.param, not_in=[10, 16, 26])
def random_bytes(num_bytes, not_in=(-1,)):
"""
Helper function that returns a number of random bytes, optionally excluding those of a specific length.
"""
num_bytes = num_bytes + 1 if num_bytes in not_in else num_bytes
return os.urandom(num_bytes)
def random_str(num_chars, not_in=(-1,)):
"""
Helper function that returns a string with the specified number of random characters, optionally
excluding those of a specific length.
"""
num_chars = num_chars + 1 if num_chars in not_in else num_chars
return ''.join(random.choice(base32.ENCODING) for _ in range(num_chars))
def fixed_year_timestamp_bytes(*args, **kwargs):
"""
Helper function that returns bytes for a :class:`~datetime.datetime` created by the given args.
"""
timestamp = int(calendar.timegm(datetime.datetime(*args, **kwargs).timetuple())) * 1000
return timestamp.to_bytes(6, byteorder='big')
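
# Example (a sketch): fixed_year_timestamp_bytes(1990, 1, 1) yields the 6-byte,
# big-endian, millisecond-precision timestamp for 1990-01-01T00:00:00Z; the
# ulid_bytes_year_1990 fixture prefixes it to 10 random bytes to form a full
# 16-byte (128-bit) ULID.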
| [
"[email protected]"
] | |
ebc3dcd37e323817823673c28a9a13d8be293c95 | f8ea3582884df87172cb747e424ebd0c20223614 | /tests/oldtests/testrastdist.py | 7ed004fabdd17a007282c998395e528001d9933a | [
"MIT"
] | permissive | karimbahgat/PythonGis | 94f52f800a769ee54b12c7277604ead011465321 | fb99148a15bcbe0438ddca67b484a15076bd961a | refs/heads/master | 2023-04-12T15:59:08.522464 | 2022-09-09T22:48:32 | 2022-09-09T22:48:32 | 47,153,255 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 678 | py |
import pythongis as pg
from time import time
# test distance
vect = pg.VectorData(r"C:\Users\kimo\Downloads\cshapes_0.6\cshapes.shp",
)#select=lambda f: f["GWCODE"]==666)
hist = vect.histogram("GWCODE")
#hist.view()
t = time()
distrast = pg.raster.analyzer.distance(vect, bbox=[-180,90,180,-90], width=72*10, height=36*10)
#distrast = pg.RasterData("C:/Users/kimo/Desktop/world.jpg", bbox=[-180,90,180,-90], width=512, height=256)
print time()-t
hist = distrast.bands[0].histogram()
print hist
#hist.view()
#mapp = distrast.render()
mapp = pg.renderer.Map()
mapp.add_layer(distrast)
mapp.add_layer(vect, fillcolor=None)
#mapp.add_legend()
mapp.view()
| [
"[email protected]"
] | |
a414ae6cc75cc2bab26ee9b925f10d8309d71118 | 59cc4c5983dd486b93e409da3df9e20cd8dbd04e | /metaprog/composition.py | db929ad1fa422c0eb02708e1d27df290842f8ba0 | [] | no_license | titu1994/Python-Work | a95866bf47a3aba274376ec72b994e2e4fbda634 | bc7f201ed01e2f7c49ae8d143d29e87e94420dc9 | refs/heads/master | 2023-04-27T02:33:30.731988 | 2023-04-22T19:13:03 | 2023-04-22T19:13:03 | 53,428,058 | 13 | 10 | null | null | null | null | UTF-8 | Python | false | false | 1,024 | py |
def custom_dir(c, add):
return dir(type(c)) + list(c.__dict__.keys()) + add
class BaseComposite:
"Base class for attr accesses in `self._extra_params` passed down to `self.components`"
@property
def _extra_params(self):
if not hasattr(self, 'components'):
self.components = []
if type(self.components) not in {list, tuple}:
self.components = [self.components]
elif type(self.components) == tuple:
self.components = list(self.components)
args = []
for component in self.components:
args.extend([o for o in dir(component)
if not o.startswith('_')])
return args
def __getattr__(self, k):
if k in self._extra_params:
for component in self.components:
if hasattr(component, k):
                    return getattr(component, k)
raise AttributeError(k)
def __dir__(self):
return custom_dir(self, self._extra_params)
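

# Minimal usage sketch (the Wheels/Car classes are illustrative assumptions,
# not part of the original module):
if __name__ == '__main__':
    class Wheels:
        def roll(self):
            return 'rolling'

    class Car(BaseComposite):
        def __init__(self):
            self.components = [Wheels()]

    car = Car()
    print(car.roll())          # attribute access is delegated to the Wheels component
    print('roll' in dir(car))  # True, via the custom __dir__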
| [
"[email protected]"
] | |
e9aac60d8d793b04b8cce53945eca54786292b2b | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_328/ch25_2020_03_09_20_08_11_452435.py | e45f3d0a86b1198f3a6d0682673b1b7ed61d8b3e | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | from math import sin, radians
jacas = float(input("qual a velocidade de lançamento de sua jaca: "))
jacas2 = float(input("qual o ângulo de lançamento da sua jaca: "))
d = (jacas**2*sin(radians(2*jacas2)))/9.8
if d < 98:
print("Muito perto")
elif d >= 98 and d <= 102:
print("Acertou!")
else:
print("Muito longe")
| [
"[email protected]"
] | |
7b927918987478127db33e25f638d3774ffa6cbb | d41c15b9c68ab2ee70740044d25d620e6b90a09e | /app/mod_cmd/commands/status.py | 2faf54044feec0dddce8af23988a4ef47d2decb9 | [
"Apache-2.0"
] | permissive | jaycode/Arthur.workspace | 9093b54cda983d2e8b6745b894403b5fa1282b56 | 7a581104141ee5f556e058b1276b4087a2921dfc | refs/heads/master | 2021-01-10T10:36:35.599700 | 2016-03-21T19:37:49 | 2016-03-21T19:37:49 | 55,436,635 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,480 | py | """This module is useful to allow user finds out current state of their work.
"""
from app.mod_cmd.client_instruction import ClientInstruction
from app.helpers import docs_path
from zipfile import ZipFile
from app import app, mongo
def run(project = None, args = [], **kwargs):
"""Show status of current project. Keep checking this often!
status [item]
Args:
item: Item from the project you wish to view in more detail. Possible values:
- docs: View all docs currently being worked on. Shorthand of `list_docs` command.
- context: View the detail of context currently used in the project.
"""
if project is None:
message = ""
else:
active_doc = ''
if 'last_loaded_doc' in app.session:
active_doc = app.session['last_loaded_doc']
path = docs_path()
with app.get_path(path) as path:
with ZipFile(path, 'r') as zipfile:
docs = len(zipfile.namelist())
current_context = project.context['name']
dfcount = mongo.db.data_fields.count({'project_id': project._id})
message = \
"Project name: %s\n" \
"Last loaded document: %s\n" \
"Total documents: %d\n" \
"# data fields: %d\n" \
"Context: %s" \
% (project.name, active_doc, docs, dfcount, current_context)
instruction = ClientInstruction({
'message': message
})
return [project, instruction] | [
"[email protected]"
] | |
7f027a5f8990360a411659c96ad45506c18ae5a9 | 8319c9859bde5e21eba2ba60219ebe496646470b | /src/stratis_cli/_stratisd_constants.py | 86f901513a72fc8576ab0bd4c5884a268bb8d66e | [
"Apache-2.0"
] | permissive | stratis-storage/stratis-cli | 0be83c0903c1050ac3cf75a19121ba19be97c4a6 | 399c95edd7c37e5fb9494f7829d5355c011fb7d7 | refs/heads/master | 2023-08-31T23:24:02.710481 | 2023-08-30T20:18:02 | 2023-08-30T20:18:02 | 66,956,943 | 107 | 44 | Apache-2.0 | 2023-09-08T18:25:33 | 2016-08-30T16:09:39 | Python | UTF-8 | Python | false | false | 2,347 | py | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Stratisd error classes.
"""
# isort: STDLIB
from enum import Enum, IntEnum
from ._error_codes import PoolMaintenanceErrorCode
class StratisdErrors(IntEnum):
"""
Stratisd Errors
"""
OK = 0
ERROR = 1
def __str__(self):
return self.name
class BlockDevTiers(IntEnum):
"""
Tier to which a blockdev device belongs.
"""
DATA = 0
CACHE = 1
def __str__(self):
return self.name
CLEVIS_KEY_TANG_TRUST_URL = "stratis:tang:trust_url"
CLEVIS_PIN_TANG = "tang"
CLEVIS_PIN_TPM2 = "tpm2"
CLEVIS_KEY_THP = "thp"
CLEVIS_KEY_URL = "url"
class ReportKey(Enum):
"""
Report identifiers.
Note: "managed_objects_report" is not a key recognized by stratisd.
However, since the other constants are, and they are all used together,
this type is defined with the other stratisd constants.
"""
ENGINE_STATE = "engine_state_report"
MANAGED_OBJECTS = "managed_objects_report"
STOPPED_POOLS = "stopped_pools"
class PoolActionAvailability(IntEnum):
"""
What category of interactions a pool is enabled for.
"""
fully_operational = 0 # pylint: disable=invalid-name
no_ipc_requests = 1 # pylint: disable=invalid-name
no_pool_changes = 2 # pylint: disable=invalid-name
def pool_maintenance_error_codes(self):
"""
Return the list of PoolMaintenanceErrorCodes for this availability.
:rtype: list of PoolMaintenanceErrorCode
"""
codes = []
if self >= PoolActionAvailability.no_ipc_requests:
codes.append(PoolMaintenanceErrorCode.NO_IPC_REQUESTS)
if self >= PoolActionAvailability.no_pool_changes:
codes.append(PoolMaintenanceErrorCode.NO_POOL_CHANGES)
return codes
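
# Usage sketch: per the thresholds above,
#   PoolActionAvailability.fully_operational.pool_maintenance_error_codes() -> []
#   PoolActionAvailability.no_pool_changes.pool_maintenance_error_codes()
#       -> [PoolMaintenanceErrorCode.NO_IPC_REQUESTS,
#           PoolMaintenanceErrorCode.NO_POOL_CHANGES]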
| [
"[email protected]"
] | |
3eab99e78874519237ee18fbd383413183909b24 | 92b7afd4c17088a63a38d80d2f981cf146eae988 | /Chapter02/U02_Ex05_ConvertCtoF_Table.py | 612668d6c17270152ff0932fe7173e0c27aaa9b4 | [] | no_license | billm79/COOP2018 | 6d3e5d9f8309646beab0078a2f84bb6fe30b18fc | 6588c0ebfa932fbae7eec11c20270e4a8e969377 | refs/heads/master | 2020-03-28T02:00:28.959515 | 2019-05-13T17:51:05 | 2019-05-13T17:51:05 | 147,540,965 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 957 | py | # U02_Ex05_ConvertCtoF_Table.py
#
# Author: Bill Montana
# Course: Coding for OOP
# Section: A3
# Date: 18 Nov 2017
# IDE: PyCharm Community Edition
#
# Assignment Info
# Exercise: 5
# Source: Python Programming
# Chapter: 2
#
# Program Description
# Computes and prints a table of Celsius temperatures and the Fahrenheit equivalents from 0°C to 100°C
#
# Algorithm (pseudocode)
# introduce program
# print table headings
# loop from 0 to 100 in increments of 10
# calculate °F from loop variable (°C)
# print results in table
def main():
print('This program computes and prints a table of Celsius temperatures and the Fahrenheit equivalents from 0°C to 100°C.')
print('\n{:^3}\t{:^5}'.format('°C', '°F'))
print('{:^3}\t{:^5}'.format('---', '-----'))
for celsius in range(0, 101, 10):
fahrenheit = 1.8 * celsius + 32
print('{:>3}\t{:>5.1f}'.format(celsius, fahrenheit))
main() | [
"[email protected]"
] | |
cf77e4bc9a182a406270a6fd7cf2558350304f7c | d41d18d3ea6edd2ec478b500386375a8693f1392 | /plotly/validators/scatterpolargl/unselected/marker/_opacity.py | 3cb2d6a2455082c340af789b6b315dfff4f79427 | [
"MIT"
] | permissive | miladrux/plotly.py | 38921dd6618650d03be9891d6078e771ffccc99a | dbb79e43e2cc6c5762251537d24bad1dab930fff | refs/heads/master | 2020-03-27T01:46:57.497871 | 2018-08-20T22:37:38 | 2018-08-20T22:37:38 | 145,742,203 | 1 | 0 | MIT | 2018-08-22T17:37:07 | 2018-08-22T17:37:07 | null | UTF-8 | Python | false | false | 500 | py | import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name='opacity',
parent_name='scatterpolargl.unselected.marker',
**kwargs
):
super(OpacityValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='style',
max=1,
min=0,
role='style',
**kwargs
)
| [
"[email protected]"
] | |
57fa882d992e873136c6e65dba8a3c40fc606e37 | 95efc2300bd2936eb9b4ca8f9cda55764047f094 | /django1/src/customlogin/urls.py | 14fcc5ea0182abaa54ff28601abb0ce65e803934 | [] | no_license | gittaek/jeong | d207d6e41398803475aff82a49bea01e21a86901 | 20808cbb97daff79a4c0b4a017106519f99d919f | refs/heads/master | 2020-04-21T23:11:17.202531 | 2019-02-10T03:20:57 | 2019-02-10T03:20:57 | 169,938,169 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | #하위 URLConf
#app_name: 하위 URLConf 파일의 등록된 URL들의 그룹명
#urlpatterns: URL과 뷰함수를 이그트형태로 등록하는 변수
from django.urls import path
from .views import *
app_name = 'cl'
urlpatterns = [
path('signup/', signup, name= 'signup'),
path('signin/', signin, name= 'signin'),
path('signout/', signout, name= 'signout'),
] | [
"user@DESKTOP-37GULAI"
] | user@DESKTOP-37GULAI |
68618a7466a5c4c5db83220054f3f89a5f25af56 | 4c7914bf0eb52f2fe5dab70fa630a322a9449e05 | /淘宝美食/spider_tb.py | 979bf47a13e824f3edf29f2d47cb5b352420348d | [] | no_license | xhongc/pythonCrawl | f334d737326a47782d2533c4db23734729f13099 | a38e59496dd78b6e070ea6882043b1744190103e | refs/heads/master | 2022-12-10T01:22:01.608193 | 2020-01-12T09:43:19 | 2020-01-12T09:43:22 | 93,115,695 | 4 | 5 | null | 2022-11-22T02:36:28 | 2017-06-02T01:47:22 | Python | UTF-8 | Python | false | false | 3,288 | py | import re
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import pymysql
from config import *
from multiprocessing import Pool
driver = webdriver.Chrome(executable_path='D:\work\chromedriver\chromedriver.exe')
wait = WebDriverWait(driver, 10)
# 链接MySQL数据库
conn = pymysql.connect(**db_config)
cursor = conn.cursor()
def search():
try:
driver.get('https://www.taobao.com/')
input = wait.until(
EC.presence_of_element_located((By.CSS_SELECTOR, '#q')))
submit = wait.until(
EC.presence_of_element_located((By.CSS_SELECTOR, '#J_TSearchForm > div.search-button > button')))
input.send_keys('美食')
submit.click()
# 返回总页数
total = wait.until(
EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.total')))
return total.text
except TimeoutException:
search()
def next_page(page_num):
try:
input = wait.until(
EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.form > input')))
submit = wait.until(
EC.presence_of_element_located(
(By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.form > span.btn.J_Submit')))
# 清除输入框数据
input.clear()
input.send_keys(page_num)
submit.click()
wait.until(EC.text_to_be_present_in_element(
(By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > ul > li.item.active > span'), str(page_num)))
get_products()
except TimeoutException:
next_page(page_num)
def get_products():
wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-itemlist > div > div')))
# 获取网页源码
html = driver.page_source
soup = BeautifulSoup(html, 'lxml')
list1 = soup.findAll('div', {'data-category': 'auctions'})
for each in list1:
items = {}
items['title'] = each.find('div', 'row row-2 title').get_text().strip()
items['momeny'] = each.find('div', 'price g_price g_price-highlight').strong.get_text()
items['people'] = each.find('div', 'deal-cnt').get_text()[:-3]
items['name'] = each.find('a', 'shopname J_MouseEneterLeave J_ShopInfo').get_text().strip()
save_products(items)
def save_products(items):
# with open('products.json','a',encoding='utf-8') as f:
# f.write(json.dumps(content,ensure_ascii=False)+'\n')
sql = "insert into taobao(title,momeny,people,name)VALUES (%s,%s,%s,%s)"
try:
cursor.execute(sql, (items['title'], items['momeny'], items['people'], items['name']))
conn.commit()
except pymysql.Error as e:
print(e.args)
def main():
try:
total = search()
total = int(re.compile(r'.*?(\d+)').search(total).group(1))
for i in range(1, total + 1):
next_page(i)
finally:
driver.close()
cursor.close()
        conn.close()
if __name__ == '__main__':
main()
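
# Note: `db_config` is provided by the local config module imported above via
# `from config import *`. A minimal sketch of what config.py is assumed to
# contain (all values are placeholders, not from the original project):
#
#   db_config = {
#       'host': 'localhost',
#       'port': 3306,
#       'user': 'root',
#       'password': '******',
#       'db': 'taobao',
#       'charset': 'utf8mb4',
#   }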
| [
"[email protected]"
] | |
ea2438007b0386076e93ad827190c5c71c47f32f | 99efa551de2a4586767d109b211d8c016c5454c4 | /String/App1/K.py | 17d6884198554cfa3fc90d41e6451d43c863783f | [] | no_license | Vijay-Ky/Anit_Rooman_Python_Training | 9decf34b651ea8455bdd4cdf1e2239a77cf1bc7e | 155ba84620c28fd64a7219013c3bdd43f76fa278 | refs/heads/master | 2020-05-31T23:22:45.865104 | 2019-06-18T09:33:10 | 2019-06-18T09:33:10 | 190,537,707 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | var1 = 'Hello World!'
var2 = "Python Programming"
print ("var1[0]: ", var1[0])
print ("var2[1:5]: ", var2[1:5])
| [
"[email protected]"
] | |
dc75718ca509b24cafd770b46279b39533e4dce4 | 1c74a2e075793e1d35c441518e2e138e14e26ea5 | /Tree/124. 二叉树中的最大路径和.py | b4a70b63656b087e29f283c107cc12e1c831c361 | [] | no_license | Dawinia/LeetCode | 1a385bfadbc4869c46dc1e9b8ca7656b77d746a0 | e1dcc71ca657b42eb8eb15116697e852ef4a475a | refs/heads/master | 2021-07-20T00:56:01.058471 | 2020-07-22T14:07:04 | 2020-07-22T14:07:04 | 197,305,126 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 616 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def __init__(self):
self.ans = -1000000000
def maxPathSum(self, root: TreeNode) -> int:
self.traverse(root)
return self.ans
def traverse(self, root):
if not root:
return 0
left = max(0, self.traverse(root.left))
right = max(0, self.traverse(root.right))
self.ans = max(self.ans, left + right + root.val)
return max(left, right) + root.val
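
# Worked example: for the tree [-10, 9, 20, None, None, 15, 7]
# (root -10 with children 9 and 20; 20 has children 15 and 7),
# maxPathSum returns 42: at node 20 the candidate
# left + right + root.val = 15 + 7 + 20 updates self.ans.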
| [
"[email protected]"
] | |
c6705abcd1b38cd180cd1869f249ca60fb5c4516 | ef187d259d33e97c7b9ed07dfbf065cec3e41f59 | /work/atcoder/abc/abc017/C/answers/121328_akio0803.py | 35e70dd74eb09b1c6b633e244a904471419d122e | [] | no_license | kjnh10/pcw | 847f7295ea3174490485ffe14ce4cdea0931c032 | 8f677701bce15517fb9362cc5b596644da62dca8 | refs/heads/master | 2020-03-18T09:54:23.442772 | 2018-07-19T00:26:09 | 2018-07-19T00:26:09 | 134,586,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | N, M = [int(_) for _ in input().split()]
res = [0] * (M + 2)
ans = 0
for _ in range(N):
l, r, s = [int(_) for _ in input().split()]
res[l] = res[l] + s
res[r+1] = res[r+1] - s
ans += s
for i in range(M+1):
res[i+1] = res[i] + res[i+1]
print(ans - min(res[1:-1])) | [
"[email protected]"
] | |
c2a251f706de272919eee0bf2b981f3e0def3bbe | 17268419060d62dabb6e9b9ca70742f0a5ba1494 | /pp/assert_grating_coupler_properties.py | e4623d5afe7b5f3da50f619ba7c002a9a0e798c3 | [
"MIT"
] | permissive | TrendingTechnology/gdsfactory | a19124423b12cbbb4f35b61f33303e9a012f82e5 | c968558dba1bae7a0421bdf49dc192068147b776 | refs/heads/master | 2023-02-22T03:05:16.412440 | 2021-01-24T03:38:00 | 2021-01-24T03:38:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 990 | py | from pp.component import Component
def assert_grating_coupler_properties(gc: Component):
assert hasattr(
gc, "polarization"
), f"{gc.name} does not have polarization attribute"
assert gc.polarization in [
"te",
"tm",
], f"{gc.name} polarization should be 'te' or 'tm'"
assert hasattr(
gc, "wavelength"
), f"{gc.name} wavelength does not have wavelength attribute"
assert (
500 < gc.wavelength < 2000
), f"{gc.name} wavelength {gc.wavelength} should be in nm"
if "W0" not in gc.ports:
print(f"grating_coupler {gc.name} should have a W0 port. It has {gc.ports}")
if "W0" in gc.ports and gc.ports["W0"].orientation != 180:
print(
f"grating_coupler {gc.name} W0 port should have orientation = 180 degrees. It has {gc.ports['W0'].orientation}"
)
if __name__ == "__main__":
import pp
c = pp.c.grating_coupler_elliptical_te()
assert_grating_coupler_properties(c)
| [
"[email protected]"
] | |
dfd15b290e3345001bd70e8dccef8397e16bb767 | e56b63fa189d6c1e84eda135a41ae63d6177c10e | /setup.py | 29e819082c2ce53fd58b560ba7c80ae49b497ba4 | [] | no_license | bibi21000/janitoo_scene | 22ededfc79866a806b22770f8c347169407523cb | c9c8f6b58f5109c47ae302b1fbc58c39d1c35857 | refs/heads/master | 2021-01-17T18:10:49.644483 | 2016-06-22T22:10:42 | 2016-06-22T22:10:42 | 60,803,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,642 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Setup file of Janitoo
"""
__license__ = """
This file is part of Janitoo.
Janitoo is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Janitoo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Janitoo. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Sébastien GALLET aka bibi21000'
__email__ = '[email protected]'
__copyright__ = "Copyright © 2013-2014-2015-2016 Sébastien GALLET aka bibi21000"
from os import name as os_name
from setuptools import setup, find_packages
from distutils.extension import Extension
from platform import system as platform_system
import glob
import os
import sys
from _version import janitoo_version
DEBIAN_PACKAGE = False
filtered_args = []
for arg in sys.argv:
if arg == "--debian-package":
DEBIAN_PACKAGE = True
else:
filtered_args.append(arg)
sys.argv = filtered_args
def data_files_config(res, rsrc, src, pattern):
for root, dirs, fils in os.walk(src):
if src == root:
sub = []
for fil in fils:
sub.append(os.path.join(root,fil))
res.append((rsrc, sub))
for dire in dirs:
data_files_config(res, os.path.join(rsrc, dire), os.path.join(root, dire), pattern)
data_files = []
data_files_config(data_files, 'docs','src/docs/','*')
setup(
name = 'janitoo_scene',
description = "Scene components",
long_description = "Scene components",
license = """
This file is part of Janitoo.
Janitoo is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Janitoo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Janitoo. If not, see <http://www.gnu.org/licenses/>.
""",
author='Sébastien GALLET aka bibi2100 <[email protected]>',
author_email='[email protected]',
url='http://bibi21000.gallet.info/',
version = janitoo_version,
keywords = "scene",
zip_safe = False,
packages = find_packages('src', exclude=["scripts", "docs", "config"]),
package_dir = { '': 'src' },
include_package_data=True,
data_files = data_files,
install_requires=[
'janitoo',
'janitoo_factory',
],
dependency_links = [
'https://github.com/bibi21000/janitoo/archive/master.zip#egg=janitoo',
'https://github.com/bibi21000/janitoo_factory/archive/master.zip#egg=janitoo_factory',
],
entry_points = {
"janitoo.components": [
"scene.simple = janitoo_scenarios.component:make_simple_scene",
],
"janitoo.threads": [
"scene = janitoo_scenes.thread:make_scene",
],
},
)
| [
"[email protected]"
] | |
10e2d1487315bb928296b66298978da171a19fac | fb909b0716f62ae118afa7d505cbcbd28f62bc63 | /main/views.py | 8282565f82dee5405121aed7cdc52d07b5200ebf | [] | no_license | dkalola/JustAsk-Final | a5b951462cd3c88eb84320bb8fcf10c32f959090 | c2e7c2ffae4d3c2d870d5ba5348a6bae62db5319 | refs/heads/main | 2023-05-24T16:02:17.425251 | 2021-06-16T19:33:52 | 2021-06-16T19:33:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74,259 | py | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import login, logout, authenticate
from django.contrib import messages
from .forms import NewUserForm
from . models import Student,Question,Answer,EBook,Note,Paper,Like,Dislike,BuyBook,BookSol,BookRequest,Document,BookTransaction,SubsTransaction,Message,Text,Comments
from . models import CreditTransaction,QuizeQuestion,Choice,Searche,Refer,Feedback
import numpy as np
import re
from django.urls import reverse
from django.contrib.auth.models import User
from django.core.paginator import Paginator
import re
from django.utils.html import strip_tags
import random
import string
from gingerit.gingerit import GingerIt
import nltk.data
from nltk import sent_tokenize
from . import Checksum
from .Checksum import verify_checksum
from django.views.decorators.csrf import csrf_exempt
from .utils import VerifyPaytmResponse
from JUSTASK import settings
import requests
from datetime import datetime
from datetime import timedelta , timezone
from django.conf import settings
from django.core.mail import send_mail
from django.contrib import auth
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string, get_template
import Levenshtein as lev
import csv
import codecs
import numpy as np
import pandas as pd
from paypal.standard.forms import PayPalPaymentsForm
def unanswered(request):
ques = []
for q in Question.objects.all():
if q.answered == False:
ques.append(q)
paginator = Paginator(ques,6)
page2 = request.GET.get('page')
ques = paginator.get_page(page2)
data={
'questions':ques,
}
return render(request, 'main/unanswered.html',data)
def qsearch_request(request):
bad_chars = [';', ':', '!', "*",")","(","{","}","[","]"]
qu = []
ques = []
search_r = re.sub(' +', ' ',request.GET.get('search')).rstrip().lstrip()
for i in bad_chars :
search_r = search_r.replace(i, '')
if (search_r == ''):
return redirect("main:homepage")
else:
for qd in Question.objects.all():
if qd.answered == False:
cleantext = strip_tags(qd.question)
match = lev.ratio(search_r,cleantext)
cord = (qd,match*100)
qu.append(cord)
if len(qu) == 100:
break
sortlist = Sort_Tuple(qu)
for s in range(len(sortlist)):
ques.append(sortlist[s][0])
paginator = Paginator(ques,6)
page = request.GET.get('page')
ques = paginator.get_page(page)
data={
'question':strip_tags(search_r),
'questions':ques,
}
return render(request,"main/unanswered.html",data)
def expert_apply(request):
# if request.user.student.quiz == True:
# return redirect('main:profile')
return render(request, 'main/expert_apply.html')
# return redirect("main:profile")
def feedback(request):
where = request.POST.get('where')
sug = request.POST.get('suggestions')
how = request.POST.get('how')
user = request.user.student
fd = Feedback(concern=how,suggestions=sug,where=where,user=user)
fd.save()
return redirect('main:homepage')
def quiz(request):
email = request.POST.get('email')
field = request.POST.get('field')
field2 = request.POST.get('field2')
qlistf1=[]
qlistf2=[]
if email == '':
return redirect('main:expert_apply')
    if field == 'economics/commerce':
        field = 'economics'
    if field2 == 'economics/commerce':
        field2 = 'economics'
for q in QuizeQuestion.objects.order_by('?'):
if q.question_field == field:
if(len(qlistf1) == 20):
break
qlistf1.append(q)
if q.question_field == field2:
if(len(qlistf2) == 20):
break
qlistf2.append(q)
else:
pass
# print('fields here')
# print(field,field2)
for x in qlistf2:
#print(x)
qlistf1.append(x)
random.shuffle(qlistf1)
#print(Choice.objects.all())
data={
'field1':field2,
'questions':qlistf1,
'choice':Choice.objects.order_by('?'),
'field3':field,
}
request.user.student.quiz = True
request.user.student.save(update_fields=['quiz'])
return render(request, 'main/quiz.html',data)
def QuizCheck(request):
if request.user.is_anonymous != True:
field = request.POST.get('field')
field2 = request.POST.get('field2')
#print(field,field2)
#print('----------')
choice = []
result = []
score = 0
for q in QuizeQuestion.objects.all():
if q.question_field == field or q.question_field == field2:
# print(request.POST.get(f'{q.id}'))
# print('-------------')
choice.append(request.POST.get(f'{q.id}'))
#print(request.POST.get(f'{q.id}'))
else:
pass
#print(choice)
for c in choice:
#print('==============')
#print(c)
for i in Choice.objects.all():
if c == str(i.id):
result.append((strip_tags(i.question),i.choice,i.is_answer))
#print(i.is_answer)
if i.is_answer:
score = score + 1
# print("score : ",score)
break
dataframe = pd.DataFrame(result,columns=['Question','Choice','Answer'])
dataframe.to_csv(f"/home/JustAsk/media/ScoreCards/{request.user.student.Studentid}.csv")
#print(int((score/len(choice))*100))
request.user.student.QuizScoreCard = f"./media/ScoreCards/{request.user.student.Studentid}.csv" #/home/JustAsk/media/ScoreCards
request.user.student.testscore = int((score/len(choice))*100)
request.user.student.fieldofexpertise = f'{field}, {field2}'
request.user.student.QuizDate = datetime.now()
request.user.student.save(update_fields=['fieldofexpertise','testscore','QuizScoreCard','QuizDate'])
if request.user.student.testscore > 70:
print("--------Expert--------")
request.user.student.expert = True
request.user.student.save(update_fields=['expert'])
else:
return redirect('main:login')
return redirect('main:profile')
def TYDONATE(request):
return render(request, 'main/thankyou.html')
def CSV(request):
return render(request, 'main/csv.html')
def csvImport(request):
file = request.FILES['csv']
df = pd.read_csv(file)
    length = len(df)
    print('===================')
    for i in range(length):
fn = df._get_value(i, 'File_name')
a = df._get_value(i, 'Auth')
des = df._get_value(i, 'Description')
file = df._get_value(i, 'File')
bf = df._get_value(i, 'book_field')
up = df._get_value(i, 'Bcover')
Edition = df._get_value(i, 'Edition')
bid = df._get_value(i, 'BID')
book = EBook(Bcover=up,File_name=fn,Edition=Edition,book_field=bf,File=file,Email='[email protected]',Auth=a,BID=bid,Description=des,Uploaded=False)
book.save()
return render(request, 'main/csv.html')
def checkbook(request): # check if the rented book is expired or not
try:
user = auth.get_user(request)
if user.is_anonymous == False:
print(request.user)
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs == True and student.First != True:
if numOfDays(datetime.now(timezone.utc), student.EndDate) <= 0:
student.subs = False
student.save(update_fields=['subs'])
else:
pass
for bb in student.buybook_set.all():
if bb.rental == True:
if numOfDays(datetime.now(timezone.utc), bb.EndDate) <= 0:
book = bb
book.delete()
else:
pass
else:
return redirect('main:login')
except:
pass
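# Note: numOfDays is called throughout these views but defined outside this
# excerpt. Its call sites pass (start, end) datetimes and compare the result
# to day counts, so a minimal sketch consistent with that usage would be:
#
#   def numOfDays(date1, date2):
#       # signed difference in whole days (negative once date2 has passed)
#       return (date2 - date1).days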
# def ReferFriend(request):
# Studentid = request.POST.get('studid')
# students = Student.objects.all()
# for student in students:
# if (student.Email == request.user.email):
# return redirect("main:profile")
def RedeemCredts(request):
creditVal = 0.1 #hours/credit
user = auth.get_user(request)
if user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
hours_added = timedelta(hours = (creditVal*student.credits))
student.EndDate = student.EndDate + hours_added
student.credits = 0
student.save(update_fields=['EndDate','credits'])
else:
return redirect("main:login")
return redirect("main:profile")
def sub_ceck(request):
try:
user = auth.get_user(request)
if user.is_anonymous == False:
print(request.user)
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs == True:
if student.credits >= 2000:
student.credits = 2000
student.save(update_fields=['credits'])
if numOfDays(datetime.now(timezone.utc), student.EndDate) <= 0:
student.subs = False
student.qscore = 0
student.save(update_fields=['subs','qscore'])
messages.error(request, f"Your Subscription has ended. Renew to continue using other features!")
messages.error(request, f"Subscribe Anually and pay for 10 months instead of 12.")
if numOfDays(student.QuizDate, datetime.now(timezone.utc)) >= 365:
student.expert = False
student.quiz = False
student.save(update_fields=['expert','quiz'])
messages.error(request, f"Your Subscription has ended. Renew to continue using other features!")
messages.error(request, f"Subscribe Anually and pay for 10 months instead of 12.")
else:
pass
else:
pass
except:
pass
def game(request): # renders the standalone game page
return render(request,"main/game.html")
def homepage(request): # main home page it will check if the subscription is ended or not
sub_ceck(request)
checkbook(request)
stud = 'Empty'
try:
user = auth.get_user(request)
if user.is_anonymous == False:
print(request.user)
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
stud = student
if student.subs != True and student.First == True:
messages.error(request, f"First Time User? Try our Special Offer* for Free! Limited time only.")
else:
pass
data={
'stud':stud,
'books':EBook.objects.all().count(),
'notes':Note.objects.all().count(),
'questions':Question.objects.all().count(),
}
return render(request,"main/home.html",data)
else:
pass
#messages.error(request, f"Register now and get Free Subscription for 1 month!")
except:
pass
#messages.error(request, f"Register now and get Free Subscription for 1 month!")
data={
'stud':stud,
'books':EBook.objects.all().count(),
'notes':Note.objects.all().count(),
'questions':Question.objects.all().count(),
}
return render(request,"main/home.html",data)
# def home(request): # main home page it will check if the subscription is ended or not
# sub_ceck(request)
# checkbook(request)
# stud = 'Empty'
# try:
# user = auth.get_user(request)
# if user.is_anonymous == False:
# print(request.user)
# students = Student.objects.all()
# for student in students:
# if (student.Email == request.user.email):
# stud = student
# if student.subs != True and student.First == True:
# messages.error(request, f"First Time User? Try our Special Offer* for Free! Limited time only.")
# else:
# pass
# data={
# 'stud':stud,
# 'books':EBook.objects.all().count(),
# 'notes':Note.objects.all().count(),
# 'questions':Question.objects.all().count(),
# }
# return render(request,"main/home.html",data)
# else:
# messages.error(request, f"Register now and get Free Subscription for 1 months now!")
# except:
# messages.error(request, f"Register now and get Free Subscription for 1 months now!")
# data={
# 'stud':stud,
# 'books':EBook.objects.all().count(),
# 'notes':Note.objects.all().count(),
# 'questions':Question.objects.all().count(),
# }
# return render(request,"main/home.html",data)
def Cancel(request):
user = auth.get_user(request)
if user.is_anonymous == False:
print(request.user)
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
student.subs = False
student.anual = False
student.confirm = False
student.StartDate = datetime.now(timezone.utc)
student.EndDate = datetime.now(timezone.utc)
student.save(update_fields=['subs','StartDate','EndDate','anual','confirm'])
messages.error(request, f"Your Subscription has been Canceled.")
else:
return redirect("main:login")
return redirect("https://www.paypal.com/cgi-bin/webscr?cmd=_subscr-find&alias=DEVGBPZQWD3UY")
def QNA(request):
sub_ceck(request)
return render(request=request,
template_name="main/Q&A.html")
def terms(request):
sub_ceck(request)
return render(request=request,
template_name="main/terms.html")
def contrib(request):
sub_ceck(request)
return render(request=request,
template_name="main/contribute.html")
def notes(request):
sub_ceck(request)
return render(request=request,
template_name="main/notes_field.html")
def customer_service(request):
sub_ceck(request)
return render(request,"main/customer_service.html")
def note_contrib(request):
user = auth.get_user(request)
if user.is_anonymous == False:
print(request.user)
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
                student.credits = student.credits + 100
student.save(update_fields=['credits'])
else:
return redirect("main:login")
auth = request.POST.get('auth')
title = request.POST.get('title')
email = request.POST.get('email')
description = request.POST.get('foo')
file = request.FILES['file']
field = request.POST.get('field')
note = Note(File_name=title,book_field=field,File=file,Auth=auth,Description=description,Email=email,Uploaded=True)
note.save()
subject = 'Thank you for contributing to Just Ask'
text_content = f''
html_content = f'''
    <p style='font-size:20px'>Hello {auth}, <br>Thank you for contributing to Just Ask! <br>Your submission helps us provide better solutions to students like you.<br></p>
    <p style='font-size:20px'><b>You are contributing the following notes:</b></p>
    <hr>
    <p><b>Title: </b>{title}</p>
<p><b>Author: </b>{auth}</p>
<p><b>Field: </b>{field}</p>
<p><b>Description: </b>{description}</p>
<p><b>File Name: </b>{file}</p>
<hr>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [email,email_from, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
messages.success(request, f"Your Book '{title}' is added Successfully")
return render(request,"main/contribute.html")
def book_contrib(request):
auth = request.POST.get('auth')
title = request.POST.get('title')
email = request.POST.get('email')
name = request.POST.get('name')
edition = request.POST.get('edition')
field = request.POST.get('field')
ISBN = request.POST.get('isbn')
description = request.POST.get('foo1')
file = request.FILES['file']
cover = request.FILES['file2']
book = EBook(Bcover=cover,File_name=title,Edition=edition,book_field=field,File=file,Auth=auth,BID=ISBN,Description=description,Email=email,Uploaded=True)
book.save()
subject = 'Thank you for contributing to Just Ask'
text_content = f''
html_content = f'''
    <p style='font-size:20px'>Hello {auth}, <br>Thank you for contributing to Just Ask! <br>Your submission helps us provide better solutions to students like you.<br></p>
<p style='font-size:20px'><b>You are contributing the following book:</b></p>
<hr>
<p><b>Title: </b>{title}</p>
<p><b>Author: </b>{auth}</p>
<p><b>Edition: </b>{edition}</p>
<p><b>ISBN: </b>{ISBN}</p>
<p><b>Field: </b>{field}</p>
    <p><b>Description: </b>{description}</p>
<p><b>File Name: </b>{file}</p>
<hr>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [email, email_from,]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
messages.success(request, f"Your Book '{title}' is added Successfully")
return render(request,"main/contribute.html")
def ran_gen(size, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
def questionpost(request):
name = request.POST.get('auth')
email = request.POST.get('email')
field = request.POST.get('field')
question = request.POST.get('question')
qid = ran_gen(8, "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890")
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.qscore != 0:
student.qscore = student.qscore - 1
student.save(update_fields=['qscore'])
ques = Question(student=student,question=question,question_field=field,qid=qid)
ques.save()
try:
file = request.FILES['file-1']
ques.question_cover = file
ques.save(update_fields=['question_cover'])
except:
raise Http404("No File Attached")
else:
return redirect("main:question_p")
else:
return redirect("main:login")
subject = 'Question Added - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'><b>Question Information: </b></p>
<hr>
    <p><b>Question: </b>{question}</p>
<p><b>QID: </b>{qid}</p>
<p><b>Date: </b>{datetime.now()}</p>
<p><b>Student: </b>{request.user.email} {request.user.student.Studentid}</p>
<hr>
<p style='font-size:15px'>Fore more information please email us on <b>[email protected]</b></p>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = ['[email protected]', ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
messages.success(request, f"Your question is added Successfully")
return render(request,"main/question_p.html")
def inquire(request):
name = request.POST.get('name')
email = request.POST.get('email')
sub = request.POST.get('subject')
message = request.POST.get('message')
ms = Message(name = name,email=email,subject=sub,message=message)
messages.success(request, f"Your Message Recieved")
ms.save()
return redirect("main:customer_service")
def profile(request):
sub_ceck(request)
data={
'student':'',
'book': 'False',
'Student': ''
}
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if len(student.buybook_set.all()) != 0:
data={
'student':student,
'book': student.buybook_set.all(),
'Student': student,
'diff': numOfDays(datetime.now(timezone.utc), student.EndDate),
'questions':student.question_set.all(),
'count':len(student.question_set.all()),
'transactions':student.substransaction_set.all()
}
else:
data={
'student':student,
'book': 'False',
'Student': student,
'diff': numOfDays(datetime.now(timezone.utc), student.EndDate),
'questions':student.question_set.all(),
'count':len(student.question_set.all()),
'transactions':student.substransaction_set.all()
}
else:
return redirect("main:login")
return render(request,"main/profile.html",data)
def rates(request):
sub_ceck(request)
data={}
if request.user.is_authenticated:
for stud in Student.objects.all():
if stud.Email == request.user.email:
host = request.get_host()
# if subscription_plan == '1-month':
# price = "7"
# billing_cycle = 1
# billing_cycle_unit = "M"
# else:
# price = "70"
# billing_cycle = 1
# billing_cycle_unit = "Y"
pricem = "7.99"
billing_cyclem = 1
billing_cycle_unitm = "M"
paypal_dictm = {
"cmd": "_xclick-subscriptions",
'business': 'DEVGBPZQWD3UY',
"a3": pricem, # monthly price
"p3": billing_cyclem, # duration of each unit (depends on unit)
"t3": billing_cycle_unitm, # duration unit ("M for Month")
"src": "1", # make payments recur
"sra": "1", # reattempt payment on payment error
"no_note": "1", # remove extra notes (optional)
'item_name': 'Monthly Subscription',
'currency_code': 'USD',
'notify_url': 'http://{}{}'.format(host,reverse('paypal-ipn')),
'return_url': 'http://{}{}'.format(host,reverse('main:paypalcallbackmonth')),
'cancel_return': 'http://{}{}'.format(host,reverse('main:profile')),
'payer': request.user,
'custom': request.user.student.Studentid, # custom data passed back with the IPN
}
pricey = "79.99"
billing_cycley = 1
billing_cycle_unity = "Y"
paypal_dicty = {
"cmd": "_xclick-subscriptions",
'business': 'DEVGBPZQWD3UY',
"a3": pricey, # monthly price
"p3": billing_cycley, # duration of each unit (depends on unit)
"t3": billing_cycle_unity, # duration unit ("M for Month")
"src": "1", # make payments recur
"sra": "1", # reattempt payment on payment error
"no_note": "1", # remove extra notes (optional)
'item_name': 'Yearly Subscription',
'currency_code': 'USD',
'notify_url': 'http://{}{}'.format(host,reverse('paypal-ipn')),
'return_url': 'http://{}{}'.format(host,reverse('main:paypalcallbackyear')),
'cancel_return': 'http://{}{}'.format(host,reverse('main:profile')),
'payer': request.user,
'custom': request.user.student.Studentid, # custom data passed back with the IPN
}
formm = PayPalPaymentsForm(initial=paypal_dictm, button_type="subscribe")
formy = PayPalPaymentsForm(initial=paypal_dicty, button_type="subscribe")
data={
'stud':stud,
'formm':formm,
'formy':formy,
}
else:
pass
return render(request,"main/rates.html",data)
def already_sub(request):
students = Student.objects.all()
data = {}
if request.user.is_authenticated:
for student in students:
if (student.Email == request.user.email):
data={
'student':student,
'diff': numOfDays(datetime.now(timezone.utc), student.EndDate)
}
else:
return redirect("main:login")
return render(request, 'main/already_subs.html',data)
def register(request):
def ran_gen(size, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
if request.method == 'POST':
form = NewUserForm(request.POST)
if form.is_valid():
print('---------------------in----------------')
username = form.cleaned_data.get('username')
email = form.cleaned_data.get('email')
register = True
emails = []
for stud in Student.objects.all():
emails.append(stud.Email)
for e in emails:
if e == email:
register = False
if register:
messages.success(request, f"Account Created for {username}")
print('------------------')
stid = ran_gen(9, "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnopqrstuvwxyz")
user = form.save()
student = Student(Cuser=user,FirstName=username,Email=form.cleaned_data.get('email'),Studentid=stid,credits=200,qscore=20,PayPalID=stid)
student.save()
data={
'student':username,
}
subject = 'Welcome to Just Ask'
text_content = f''
html_content = render_to_string('registration/email_test.html',data)
email_from = settings.EMAIL_HOST_USER
recipient_list = [email, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
subject = 'User Registered - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'>User Added, <br>This is an email notification for user registration.<br></p>
<p style='font-size:20px'><b>User Information: </b></p>
<hr>
<p><b>Username: </b>{username}</p>
<p><b>Email: </b>{email}</p>
<p><b>Student ID: </b>{stid}</p>
<hr>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = ['[email protected]', ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
login(request, user)
return redirect("main:homepage")
else:
messages.error(request, f"{email} is already registered")
pass
else:
messages.error(request, f"Please Check You Password and Username if they have correct formate.")
form = NewUserForm()
return render(request,"main/register.html",context={"form":form})
def ran_gen(size, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
def logout_request(request):
logout(request)
messages.info(request, "Logged Out Successfully")
return redirect("main:homepage")
def login_request(request):
if request.method == 'POST':
form = AuthenticationForm(request, data=request.POST)
if form.is_valid():
username = form.cleaned_data.get('username')
password = form.cleaned_data.get('password')
user = authenticate(username=username,password=password)
if user is not None:
login(request, user)
messages.success(request, f"You are logged in as {username}")
return redirect("main:homepage")
else:
messages.error(request, "Invalid username or password")
else:
messages.error(request, "Invalid username or password")
form = AuthenticationForm()
return render(request,"main/login.html",{"form":form})
def Sort_Tuple(tup):
# reverse = None (Sorts in Ascending order)
# key is set to sort using second element of
# sublist lambda has been used
tup.sort(reverse = True,key = lambda x: x[1])
return tup
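# e.g. Sort_Tuple([('a', 10.0), ('b', 92.5)]) -> [('b', 92.5), ('a', 10.0)]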
def search_request(request):
sub_ceck(request)
bad_chars = [';',")","(","{","}","++"]
qu = []
qlist = []
books = []
qlist2 = []
search_r = re.sub(' +', ' ', request.GET.get('search')).rstrip().lstrip()
change = False
for i in bad_chars :
if i == "++": # ++ is not a good character while displaying the string in Django brings up error.
search_r = search_r.replace(i, 'pp')
change = True
else:
search_r = search_r.replace(i, '')
search_question = search_r
if change:
search_r = search_r.replace("pp", '++')
if (search_r == ''):
return redirect("main:homepage")
else:
if(search_r.lower() == 'grammar' or search_r.lower() == 'writing' or search_r.lower() == 'grammar check'):
return redirect("main:writing")
else:
pass
add = True
for x in Searche.objects.all():
if x.text == search_r:
add = False
if add:
sr = Searche(text=search_r,user=request.user)
sr.save()
if search_r == "CHIRAGJHALA2301":
refer = True
if request.user.is_authenticated:
for r in request.user.student.refer_set.all():
if r.referedby == "CHIRAGJHALA2301":
refer = False
if request.user.is_authenticated:
if refer:
refer = Refer(student=request.user.student,referedby="CHIRAGJHALA2301",credits=0)
refer.save()
request.user.student.subs = True
request.user.student.qscore = 10
request.user.student.StartDate = datetime.now()
request.user.student.EndDate = datetime.now() + timedelta(days=31)
order_id = ran_gen(6, "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnopqrstuvwxyz")
bill_amount = "0"
transaction = SubsTransaction.objects.create(student=request.user.student,orderid=order_id, amount=bill_amount)
transaction.save()
try:
subject = 'Payment Successful - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'>Hello {request.user.student.FirstName}, <br>This is to confirm that we have received your payment.<br></p>
<p style='font-size:20px'><b>Your Payment Information: </b></p>
<hr>
<p><b>Amount: </b>{bill_amount} USD</p>
<p><b>Name of payer: </b>{request.user.student.FirstName} {request.user.student.SecondName}</p>
<p><b>Order ID: </b>{order_id}</p>
<p><b>Customer ID: </b>{request.user.student.Studentid}</p>
<p><b>Subscription Plan: </b>Monthly</p>
<hr>
<p style='font-size:15px'>For more information please email us at <b>[email protected]</b></p>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [request.user.student.Email,email_from, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
except:
pass
request.user.student.save(update_fields=['StartDate','EndDate','subs','confirm','First','qscore'])
return redirect("main:profile")
else:
messages.error(request, "Referal code alreay used!")
return redirect("main:homepage")
else:
return redirect("main:login")
for qd in Question.objects.all(): # need new algorithm for better performance
cleantext = strip_tags(qd.question)
match = lev.ratio(search_r,cleantext)
cord = (qd,match*100)
qu.append(cord)
sortlist = Sort_Tuple(qu)
for s in range(len(sortlist)):
qlist.append(sortlist[s][0])
# for y in search_r.split():
# if re.search(y.lower(), sortlist[s][0].question.lower()):
# break
# else:
# pass
for book in Note.objects.all():
match1 = lev.ratio(search_r,book.File_name) #Title
match2 = lev.ratio(search_r,book.Description) #ISBN
match3 = lev.ratio(search_r,book.Auth) #Author
match = maximum(match1, match2, match3)
cord = (book,match*100)
books.append(cord)
if len(books) == 100:
break
sortlist2 = Sort_Tuple(books)
for s in range(len(sortlist2)):
qlist2.append(sortlist2[s][0])
# for y in search_r.split():
# if re.search(y.lower(), sortlist2[s][0].File_name.lower()):
# break
# else:
# pass
paginator2 = Paginator(qlist2,6)
page2 = request.GET.get('page2')
qlist2 = paginator2.get_page(page2)
paginator = Paginator(qlist,10)
page = request.GET.get('page')
qlist = paginator.get_page(page)
context = {
'question':search_question,
'qs':qlist,
'qs2':qlist2,
}
return render(request,"main/questions.html",context)
# print(search_r)
# questions = Question.objects.all()
# for q in questions:
# match = levenshtein_ratio_and_distance(search_r,q.question,True)
# if(match*100 > 50):
# pass
def answer_request(request):
sub_ceck(request)
search_r = request.GET.get('question')
print("------------------")
As = ''
searchq = ''
Stud = ''
cnt = 0
ques = Question.objects.all()
context = {
'questions':'' ,
'student':'',
'count':'',
'question': '',
'qs':'',
'liked':"",
'User':'',
}
for q in ques:
if (q.qid == search_r):
searchq = q.question
try:
As = q
Stud = q.answer.student
cnt = Stud.answer_set.count()
except:
As = q
Stud = 'False'
cnt = 0
qu = []
qlist = []
for qd in Question.objects.all():
clean = re.compile('<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});')
cleantext = re.sub(clean, '', qd.question)
match = lev.ratio(searchq,cleantext)
# print('-----------')
# print(search_r)
# print(match*100)
# print(cleantext)
cord = (qd,match*100)
qu.append(cord)
sortlist = Sort_Tuple(qu)
#print('-----------')
for s in range(len(sortlist)):
if(s == 3):
break
qlist.append(sortlist[s][0])
print('------------------------------------------------')
print(request.user)
user = auth.get_user(request)
if user.is_anonymous == False:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
likes = True
for l in student.like_set.all():
if l.qid == search_r:
likes = False
for l in student.dislike_set.all():
if l.qid == search_r:
likes = False
try:
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':likes,
'User':student,
'comments':As.answer.comments_set.all(),
}
except:
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':likes,
'User':student,
'comments':False,
}
return render(request,"main/answer.html",context)
else:
pass
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':"None",
'User':'',
'comments':'',
}
return render(request,"main/answer.html",context)
def comment(request):
search_r = request.POST.get('question')
comment = request.POST.get('comment')
print("------------------")
As = ''
searchq = ''
Stud = ''
cnt = 0
ques = Question.objects.all()
context = {
'questions':'' ,
'student':'',
'count':'',
'question': '',
'qs':'',
'liked':"",
'User':'',
}
for q in ques:
if (q.qid == search_r):
searchq = q.question
try:
Ansques = q
Stud = q.answer.student
cnt = Stud.answer_set.count()
except:
Ansques = q
Stud = 'False'
cnt = 0
qu = []
qlist = []
for qd in Question.objects.all():
clean = re.compile('<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});')
cleantext = re.sub(clean, '', qd.question)
match = lev.ratio(searchq,cleantext)
# print('-----------')
# print(search_r)
# print(match*100)
# print(cleantext)
cord = (qd,match*100)
qu.append(cord)
sortlist = Sort_Tuple(qu)
#print('-----------')
for s in range(len(sortlist)):
if(s == 3):
break
qlist.append(sortlist[s][0])
print('------------------------------------------------')
print(request.user)
user = auth.get_user(request)
if user.is_anonymous == False:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
cmt = Comments(student=student,ans=Ansques.answer,text=comment)
cmt.save()
context = {
'questions': Ansques,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':"None",
'User':student,
'comments':Ansques.answer.comments_set.all(),
}
return render(request,"main/answer.html",context)
context = {
'questions': Ansques,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':"None",
'User':'',
'comments':'',
}
return render(request,"main/answer.html",context)
def ran_gen(size, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
def question_p(request):
return render(request,"main/question_p.html")
def question_post(request):
search_r = request.GET.get('question')
data={
'question':search_r,
}
return render(request,"main/question_p.html",data)
def answer_post(request):
# file1 = request.FILES['file1']
# file2 = request.FILES['file2']
# file3 = request.FILES['file3']
# file4 = request.FILES['file4']
# file5 = request.FILES['file5']
# file6 = request.FILES['file6']
# file7 = request.FILES['file7']
# file8 = request.FILES['file8']
# file9 = request.FILES['file9']
# file10 = request.FILES['file10']
search_r = request.POST.get('ans')
search_r2 = request.POST.get('qid')
if search_r == '':
search_r = 'Answered'
print('================')
# print(file)
print(search_r)
print(search_r2)
print('================')
question = ''
for q in Question.objects.all():
if(q.qid == search_r2):
question = q
if q.student.credits != 2000:
q.student.credits = q.student.credits + 20
q.student.save(update_fields=['credits'])
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs:
ans = Answer(answer=search_r,question=question, student=student)
ans.save()
question.answered = True
question.save(update_fields=['answered'])
if (student.Studentid != question.student.Studentid and student.credits != 2000):
student.credits = student.credits + 20
student.save(update_fields=['credits'])
else:
pass
try:
ans.ans_file = request.FILES['file1']
except:
ans.ans_file = ''
try:
ans.ans_file2 = request.FILES['file2']
except:
ans.ans_file2 = ''
try:
ans.ans_file3 = request.FILES['file3']
except:
ans.ans_file3 = ''
try:
ans.ans_file4 = request.FILES['file4']
except:
ans.ans_file4 = ''
try:
ans.ans_file5 = request.FILES['file5']
except:
ans.ans_file5 = ''
try:
ans.ans_file6 = request.FILES['file6']
except:
ans.ans_file6 = ''
try:
ans.ans_file7 = request.FILES['file7']
except:
ans.ans_file7 = ''
try:
ans.ans_file8 = request.FILES['file8']
except:
ans.ans_file8 = ''
try:
ans.ans_file9 = request.FILES['file9']
except:
ans.ans_file9 = ''
try:
ans.ans_file10 = request.FILES['file10']
except:
ans.ans_file10 = ''
ans.save(update_fields=['ans_file','ans_file2','ans_file3','ans_file4','ans_file5','ans_file6','ans_file7','ans_file8','ans_file9','ans_file10'])
else:
pass
# student.ProfilePicture = file
# student.save(update_fields=['ProfilePicture'])
return redirect('main:QNA')
def like_question(request):
search_r = request.GET.get('qid')
print("---------like_request---------")
print(search_r)
As = ''
Stud = ''
searchq = ''
cnt = 0
ques = Question.objects.all()
for q in ques:
if (q.qid == search_r):
searchq = q.question
try:
print("---------try---------")
As = q
Stud = q.answer.student
cnt = Stud.answer_set.count()
matchlike = True
for li in Stud.like_set.all():
if (li.qid == search_r):
matchlike = False
if matchlike:
# if(Stud.dislike_set.get(qid=search_r).exists() == False):
print('-----------------liked----------------')
q.answer.likes = q.answer.likes+1
q.answer.save(update_fields=['likes'])
like = Like(qid = search_r,student=Stud)
like.save()
except:
As = q
Stud = 'False'
cnt = 0
context = {
'questions':'' ,
'student':'',
'count':'',
'question': '',
'qs':'',
'liked':"",
'User':'',
}
for q in ques:
if (q.qid == search_r):
searchq = q.question
try:
As = q
Stud = q.answer.student
cnt = Stud.answer_set.count()
except:
As = q
Stud = 'False'
cnt = 0
qu = []
qlist = []
for qd in Question.objects.all():
clean = re.compile('<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});')
cleantext = re.sub(clean, '', qd.question)
match = lev.ratio(searchq,cleantext)
# print('-----------')
# print(search_r)
# print(match*100)
# print(cleantext)
cord = (qd,match*100)
qu.append(cord)
sortlist = Sort_Tuple(qu)
#print('-----------')
for s in range(len(sortlist)):
if(s == 3):
break
qlist.append(sortlist[s][0])
print('------------------------------------------------')
print(request.user)
user = auth.get_user(request)
if user.is_anonymous == False:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
likes = 'Likes'
try:
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':likes,
'User':student,
'comments':As.answer.comments_set.all(),
}
except:
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':likes,
'User':student,
'comments':False,
}
return render(request,"main/answer.html",context)
else:
pass
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':"None",
'User':'',
'comments':'',
}
return render(request,"main/answer.html",context)
def dislike_question(request):
search_r = request.GET.get('qid')
search_r1 = request.GET.get('reason')
search_r2 = request.GET.get('Additional')
print("---------like_request---------")
print(search_r)
reason = str(search_r1) + str(search_r2)
As = ''
Stud = ''
searchq = ''
cnt = 0
ques = Question.objects.all()
for q in ques:
if (q.qid == search_r):
searchq = q.question
try:
print("---------try---------")
As = q
Stud = q.answer.student
cnt = Stud.answer_set.count()
matchlike = True
for li in Stud.dislike_set.all():
if (li.qid == search_r):
matchlike = False
if matchlike:
print('-----------------disliked----------------')
q.answer.dislikes = q.answer.dislikes+1
q.answer.save(update_fields=['dislikes'])
like = Dislike(qid = search_r,student=Stud,reason=reason)
like.save()
except:
As = q
Stud = 'False'
cnt = 0
qu = []
qlist = []
for qd in Question.objects.all():
clean = re.compile('<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});')
cleantext = re.sub(clean, '', qd.question)
match = lev.ratio(searchq,cleantext)
# print('-----------')
# print(search_r)
# print(match*100)
# print(cleantext)
cord = (qd,match*100)
qu.append(cord)
sortlist = Sort_Tuple(qu)
#print('-----------')
for s in range(len(sortlist)):
if(s == 3):
break
qlist.append(sortlist[s][0])
print('------------------------------------------------')
print(request.user)
user = auth.get_user(request)
if user.is_anonymous == False:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
likes = 'Likes'
try:
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':likes,
'User':student,
'comments':As.answer.comments_set.all(),
}
except:
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':likes,
'User':student,
'comments':False,
}
return render(request,"main/answer.html",context)
else:
pass
context = {
'questions': As,
'student':Stud,
'count':cnt,
'question': search_r,
'qs':qlist,
'liked':"None",
'User':'',
'comments':'',
}
return render(request,"main/answer.html",context)
def books(request):
sub_ceck(request)
checkbook(request)
book = {
'book': 'False',
'Student': ''
}
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if len(student.buybook_set.all()) != 0:
book = {
'book': student.buybook_set.all(),
'Student': student
}
else:
book = {
'book': 'False',
'Student': student
}
else:
pass
return render(request,"main/book.html",book)
def maximum(a, b, c):
if (a >= b) and (a >= c):
largest = a
elif (b >= a) and (b >= c):
largest = b
else:
largest = c
return largest
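# e.g. maximum(3, 9, 7) -> 9 (equivalent to the built-in max(a, b, c))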
def booksearch_request(request):
sub_ceck(request)
checkbook(request)
books = []
qlist = []
search_r = re.sub(' +', ' ',request.GET.get('search')).rstrip().lstrip()
if (search_r == ''):
return redirect("main:books")
else:
for book in EBook.objects.all():
match1 = lev.ratio(search_r,book.File_name) #Title
match2 = lev.ratio(search_r,book.BID) #ISBN
match3 = lev.ratio(search_r,book.Auth) #Author
match = maximum(match1, match2, match3)
print(match*100)
cord = (book,match*100)
books.append(cord)
sortlist = Sort_Tuple(books)
for s in range(len(sortlist)):
qlist.append(sortlist[s][0])
# print('-----------')
# print(sortlist[s][1])
# print(sortlist[s][0].File_name)
# for y in search_r.split():
# if re.search(y.lower(), sortlist[s][0].File_name.lower()) or re.search(y.lower(), sortlist[s][0].BID.lower()) or re.search(y.lower(), sortlist[s][0].Auth.lower()):
# break
# else:
# pass
paginator = Paginator(qlist,8)
page = request.GET.get('page')
qlist = paginator.get_page(page)
q = {
'question': search_r,
'qs':qlist,
}
return render(request,"main/book_search.html",q)
book_buy = ''
def bookbuy_request(request):
sub_ceck(request)
checkbook(request)
print("HI")
search_r = request.GET.get('bid')
print(search_r)
q = {
'book': '',
}
books = EBook.objects.all()
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
for b in student.buybook_set.all():
if b.book.BID == search_r:
q = {
'book': b.book,
'stud':student,
}
return render(request,"main/bookbuy_view.html",q)
for book in books:
if(book.BID == search_r):
print('---------------------')
book_buy = book
q = {
'book': book,
}
break
return render(request,"main/book_buy.html",q)
def bookview_request(request):
sub_ceck(request)
checkbook(request)
search_r = request.GET.get('bid')
print(search_r)
q = {
'book': '',
}
books = EBook.objects.all()
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs:
for book in books:
if(book.BID == search_r):
print('---------------------')
q = {
'book': book,
'stud':student,
}
break
else:
q = {
'book': '',
'stud':False,
}
return render(request,"main/book_view.html",q)
def bookbuyview_request(request):
sub_ceck(request)
checkbook(request)
search_r = request.GET.get('bid')
print(search_r)
q = {
'book': '',
}
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
for book in student.buybook_set.all():
if(book.book.BID == search_r):
print('---------------------')
q = {
'book': book.book,
'stud':student,
}
return render(request,"main/bookbuy_view.html",q)
return redirect("main:books")
def booksoltuion_request(request):
sub_ceck(request)
return render(request,"main/book_solution.html")
def solution(request):
book = {
'book': '',
'Student': '',
}
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs:
book = {
'book': student.buybook_set.all(),
'Student': student
}
else:
pass
return render(request,"main/solution.html",book)
def booksolsearch_request(request):
books = []
qlist = []
search_r = re.sub(' +', ' ',request.GET.get('search')).rstrip().lstrip()
empty = (search_r == '')
if empty:
return redirect("main:solution")
else:
for book in BookSol.objects.all():
match1 = lev.ratio(search_r,book.File_name) #Title
match3 = lev.ratio(search_r,book.Auth) #Author
match = maximum(match1, 0, match3)
print(match*100)
cord = (book,match*100)
books.append(cord)
sortlist = Sort_Tuple(books)
for s in range(len(sortlist)):
print('-----------')
print(sortlist[s][1])
print(sortlist[s][0].File_name)
for y in search_r.split():
if re.search(y.lower(), sortlist[s][0].File_name.lower()) or re.search(y.lower(), sortlist[s][0].Auth.lower()):
qlist.append(sortlist[s][0])
break
else:
pass
paginator = Paginator(qlist,6)
page = request.GET.get('page')
qlist = paginator.get_page(page)
q = {
'question': search_r,
'qs':qlist,
}
return render(request,"main/booksol_search.html",q)
def booksoltuion_view(request):
search_r = request.GET.get('bid')
print('---------------')
print(search_r)
print('---------------')
q = {
'book': '',
}
books = EBook.objects.all()
user = auth.get_user(request)
if user.is_anonymous == False:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs:
for book in books:
if(book.BID == search_r):
print('---------------------')
q = {
'book': book.booksol,
}
break
return render(request,"main/book_solution.html",q)
def bookaddition_request(request):
search_r = request.GET.get('question')
print('================')
print(search_r)
print('================')
book = {
'book': '',
'Student': ''
}
if search_r != None:
br = BookRequest(Description=search_r)
br.save()
else:
pass
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email) :
if student.subs:
book = {
'book': student.buybook_set.all(),
'Student': student
}
else:
pass
return render(request,"main/book.html",book)
alphabets= "([A-Za-z])"
prefixes = "(Mr|St|Mrs|Ms|Dr)[.]"
suffixes = "(Inc|Ltd|Jr|Sr|Co)"
starters = "(Mr|Mrs|Ms|Dr|He\s|She\s|It\s|They\s|Their\s|Our\s|We\s|But\s|However\s|That\s|This\s|Wherever)"
acronyms = "([A-Z][.][A-Z][.](?:[A-Z][.])?)"
websites = "[.](com|net|org|io|gov)"
def split_into_sentences(text):
text = " " + text + " "
text = text.replace("\n"," ")
text = re.sub(prefixes,"\\1<prd>",text)
text = re.sub(websites,"<prd>\\1",text)
if "Ph.D" in text: text = text.replace("Ph.D.","Ph<prd>D<prd>")
text = re.sub("\s" + alphabets + "[.] "," \\1<prd> ",text)
text = re.sub(acronyms+" "+starters,"\\1<stop> \\2",text)
text = re.sub(alphabets + "[.]" + alphabets + "[.]" + alphabets + "[.]","\\1<prd>\\2<prd>\\3<prd>",text)
text = re.sub(alphabets + "[.]" + alphabets + "[.]","\\1<prd>\\2<prd>",text)
text = re.sub(" "+suffixes+"[.] "+starters," \\1<stop> \\2",text)
text = re.sub(" "+suffixes+"[.]"," \\1<prd>",text)
text = re.sub(" " + alphabets + "[.]"," \\1<prd>",text)
if "”" in text: text = text.replace(".”","”.")
if "\"" in text: text = text.replace(".\"","\".")
if "!" in text: text = text.replace("!\"","\"!")
if "?" in text: text = text.replace("?\"","\"?")
text = text.replace(".",".<stop>")
text = text.replace("?","?<stop>")
text = text.replace("!","!<stop>")
text = text.replace("<prd>",".")
sentences = text.split("<stop>")
sentences = sentences[:-1]
sentences = [s.strip() for s in sentences]
return sentences
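# e.g. split_into_sentences("Dr. Smith went home. He slept!")
# -> ['Dr. Smith went home.', 'He slept!']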
def writing(request):
search_r = request.POST.get('question')
error = []
edits = [{
'orignal':False,
'suggestion':'',
'error':'',
'correct':'',
'definition':''
}]
writing = {
'text': search_r,
'edits':'',
'edittedT':''
}
save = True
try:
slist = split_into_sentences(search_r)
editT = search_r
for t in Text.objects.all():
if t.text == search_r:
save = False
if save:
txt = Text(text=search_r)
txt.save()
parser = GingerIt()  # one parser instance reused for all sentences
for s in slist:
error.append(parser.parse(s))
for e in error:
r = e
# r['text'] holds the original sentence
print(r['result']) # edited text
try:
edits.append({
'orignal':r['text'],
'suggestion':r['result'],
'error':r['corrections'][0]['text'],
'correct':r['corrections'][0]['correct'],
'definition':r['corrections'][0]['definition']
})
editText = r['corrections'][0]['correct']
original = r['corrections'][0]['text']
editT = editT.replace(original,f"<span id='{original}'>{original}</span>")
print(r['corrections'][0]['text']) # wrong text
print(r['corrections'][0]['correct']) # correct text
print(r['corrections'][0]['definition']) # definition of the word
except:
pass
print('------------------------------------')
writing = {
'text': search_r,
'edits':edits,
'edittedT':editT
}
print('--------------------------')
print(editT)
print('--------------------------')
return render(request,"main/writing.html",writing)
except:
pass
return render(request,"main/writing.html",writing)
def finish(request):
applied = True
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
bookcol = student.buybook_set.all()
trans = student.booktransaction_set.all()
for t in trans:
if t.applied == False:
if t.duration:
bb = BuyBook(book=t.book,student=student,duration=t.duration)
else:
bb = BuyBook(book=t.book,student=student,duration=t.duration, StartDate=datetime.now(),EndDate=datetime.now() + timedelta(days=122),rental=True)
t.applied = True
t.save(update_fields=['applied'])
bb.save()
return redirect("main:books")
def payment(request):
search_r = request.GET.get('book')
books = EBook.objects.all()
btb = ''
for b in books:
if b.BID == search_r:
btb = b
if b.price == 0:
q = {
'book': b,
}
return render(request,"main/book_view.html",q)
else:
pass
else:
pass
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
trans = student.booktransaction_set.all()
#check if trying to buy book two times.
for bb in student.buybook_set.all():
if bb.book.BID == search_r:
return redirect("main:books")
else:
pass
order_id = Checksum.__id_generator__()
bill_amount = btb.price
transaction = BookTransaction.objects.create(book=btb,student=student,orderid=order_id,amount=bill_amount)
transaction.save()
subject = 'Payment Successful - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'>Hello {student.FirstName}, <br>This is to confirm that we have received your payment for book rental.<br></p>
<p style='font-size:20px'><b>Your Payment Information: </b></p>
<hr>
<p><b>Book Status: </b>Rented</p>
<p><b>Amount: </b>{bill_amount} USD</p>
<p><b>Name of payer: </b>{student.FirstName} {student.SecondName}</p>
<p><b>Order ID: </b>{order_id}</p>
<p><b>Customer ID: </b>{student.Studentid}</p>
<p><b>Book Rented on: </b>{datetime.now()}</p>
<p><b>Book Expires on: </b>{datetime.now() + timedelta(days=122)}</p>
<hr>
<p style='font-size:15px'>For more information please email us at <b>[email protected]</b></p>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [student.Email,email_from, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
return render(request, 'main/payment_suc.html')
else:
if request.method == 'POST':
form = AuthenticationForm(request, data=request.POST)
if form.is_valid():
username = form.cleaned_data.get('username')
password = form.cleaned_data.get('password')
user = authenticate(username=username,password=password)
if user is not None:
login(request, user)
messages.success(request, f"You are logged in as {username}")
return redirect("main:books")
else:
messages.error(request, "Invalid username or password")
else:
messages.error(request, "Invalid username or password")
form = AuthenticationForm()
return render(request,"main/login.html",{"form":form})
def payment_buy(request):
search_r = request.GET.get('book')
print('================================')
print(search_r)
books = EBook.objects.all()
btb = ''
for b in books:
if b.BID == search_r:
btb = b
if b.price == 0:
q = {
'book': b,
}
return render(request,"main/book_view.html",q)
else:
pass
else:
pass
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
trans = student.booktransaction_set.all()
#check if trying to buy book two times.
for bb in student.buybook_set.all():
if bb.book.BID == search_r:
return redirect("main:books")
else:
pass
order_id = Checksum.__id_generator__()
bill_amount = btb.price2
duration = True
transaction = BookTransaction.objects.create(book=btb,duration=duration,student=student,orderid=order_id,amount=bill_amount)
transaction.save()
subject = 'Payment Successful - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'>Hello {student.FirstName}, <br>This is to confirm that we have received your payment for buying the book.<br></p>
<p style='font-size:20px'><b>Your Payment Information: </b></p>
<hr>
<p><b>Book Status: </b>Permanent</p>
<p><b>Amount: </b>{bill_amount} USD</p>
<p><b>Name of payer: </b>{student.FirstName} {student.SecondName}</p>
<p><b>Order ID: </b>{order_id}</p>
<p><b>Customer ID: </b>{student.Studentid}</p>
<hr>
<p style='font-size:15px'>For more information please email us at <b>[email protected]</b></p>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [student.Email,email_from, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
return render(request, 'main/payment_suc.html')
else:
if request.method == 'POST':
form = AuthenticationForm(request, data=request.POST)
if form.is_valid():
username = form.cleaned_data.get('username')
password = form.cleaned_data.get('password')
user = authenticate(username=username,password=password)
if user is not None:
login(request, user)
messages.success(request, f"You are logged in as {username}")
return redirect("main:books")
else:
messages.error(request, "Invalid username or password")
else:
messages.error(request, "Invalid username or password")
form = AuthenticationForm()
return render(request,"main/login.html",{"form":form})
def numOfDays(date1, date2):
return (date2-date1).days
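# e.g. numOfDays(datetime(2020, 1, 1), datetime(2020, 1, 31)) -> 30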
def paypalcallbackmonth(request):
applied = True
amount = 0
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
student.confirm = True
student.subs = True
if student.credits != 2000:
student.credits = student.credits + 50
student.StartDate = datetime.now()
student.EndDate = datetime.now() + timedelta(days=30)
order_id = ran_gen(6, "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnopqrstuvwxyz")
bill_amount = "7.99"
transaction = SubsTransaction.objects.create(student=student,orderid=order_id, amount=bill_amount)
transaction.save()
subject = 'Payment Successful - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'>Hello {student.FirstName}, <br>This is to confirm that we have received your payment.<br></p>
<p style='font-size:20px'><b>Your Payment Information: </b></p>
<hr>
<p><b>Amount: </b>{bill_amount} USD</p>
<p><b>Name of payer: </b>{student.FirstName} {student.SecondName}</p>
<p><b>Order ID: </b>{order_id}</p>
<p><b>Customer ID: </b>{student.Studentid}</p>
<p><b>Subscription Plan: </b>Monthly</p>
<hr>
<p style='font-size:15px'>For more information please email us at <b>[email protected]</b></p>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [student.Email,email_from, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
student.qscore = 15
student.save(update_fields=['StartDate','EndDate','subs','confirm','First','credits','qscore'])
return redirect("main:profile")
def paypalcallbackyear(request):
applied = True
amount = 0
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
student.confirm = True
student.subs = True
student.anual = True
if student.credits != 2000:
student.credits = student.credits + 200
student.StartDate = datetime.now()
student.EndDate = datetime.now() + timedelta(days=365)
order_id = ran_gen(8, "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnopqrstuvwxyz")
bill_amount = "79.99"
transaction = SubsTransaction.objects.create(student=student,orderid=order_id, amount=bill_amount)
transaction.save()
subject = 'Payment Successful - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'>Hello {student.FirstName}, <br>This is to confirm that we have received your payment.<br></p>
<p style='font-size:20px'><b>Your Payment Information: </b></p>
<hr>
<p><b>Amount: </b>{bill_amount} USD</p>
<p><b>Name of payer: </b>{student.FirstName} {student.SecondName}</p>
<p><b>Order ID: </b>{order_id}</p>
<p><b>Customer ID: </b>{student.Studentid}</p>
<p><b>Subscription Plan: </b>Yearly</p>
<hr>
<p style='font-size:15px'>For more information please email us at <b>[email protected]</b></p>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [student.Email,email_from, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
student.qscore = 150
student.save(update_fields=['StartDate','EndDate','subs','confirm','First','credits','qscore'])
return redirect("main:profile")
# def payment_mntly(request):
# if request.user.is_authenticated:
# students = Student.objects.all()
# for student in students:
# if (student.Email == request.user.email):
# if student.subs != True:
# return redirect("main:finish2")
# else:
# return redirect("main:profile")
# else:
# if request.method == 'POST':
# form = AuthenticationForm(request, data=request.POST)
# if form.is_valid():
# username = form.cleaned_data.get('username')
# password = form.cleaned_data.get('password')
# user = authenticate(username=username,password=password)
# if user is not None:
# login(request, user)
# messages.success(request, f"You are logged in as {username}")
# return redirect("main:rates")
# else:
# messages.error(request, "Invalid username or password")
# else:
# messages.error(request, "Invalid username or password")
# form = AuthenticationForm()
# return render(request,"main/login.html",{"form":form})
# def payment_yrly(request):
# if request.user.is_authenticated:
# students = Student.objects.all()
# for student in students:
# if (student.Email == request.user.email):
# if student.subs != True:
# student.anual = True
# student.save(update_fields=['anual'])
# return redirect("main:finish2")
# else:
# return redirect("main:profile")
# else:
# if request.method == 'POST':
# form = AuthenticationForm(request, data=request.POST)
# if form.is_valid():
# username = form.cleaned_data.get('username')
# password = form.cleaned_data.get('password')
# user = authenticate(username=username,password=password)
# if user is not None:
# login(request, user)
# messages.success(request, f"You are logged in as {username}")
# return redirect("main:rates")
# else:
# messages.error(request, "Invalid username or password")
# else:
# messages.error(request, "Invalid username or password")
# form = AuthenticationForm()
# return render(request,"main/login.html",{"form":form})
def payment_freefirst(request):
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs != True and student.First == True:
print('=================================================')
if(student.First):
student.confirm = True
student.subs = True
student.StartDate = datetime.now()
student.EndDate = datetime.now() + timedelta(days=31)
order_id = ran_gen(6, "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnopqrstuvwxyz")
bill_amount = "0"
transaction = SubsTransaction.objects.create(student=student,orderid=order_id, amount=bill_amount)
transaction.save()
student.qscore = 10
student.First = False
subject = 'Payment Successful - Just Ask'
text_content = f''
html_content = f'''
<p style='font-size:20px'>Hello {student.FirstName}, <br>This is to confirm that we have activated your free trial.<br></p>
<p style='font-size:20px'><b>Your Payment Information: </b></p>
<hr>
<p><b>Amount: </b>{bill_amount} USD</p>
<p><b>Name of payer: </b>{student.FirstName} {student.SecondName}</p>
<p><b>Order ID: </b>{order_id}</p>
<p><b>Customer ID: </b>{student.Studentid}</p>
<p><b>Subscription Plan: </b>Trial</p>
<hr>
<p style='font-size:15px'>For more information please email us at <b>[email protected]</b></p>
'''
email_from = settings.EMAIL_HOST_USER
recipient_list = [student.Email,email_from, ]
msg = EmailMultiAlternatives(subject, text_content, email_from, recipient_list)
msg.attach_alternative(html_content, "text/html")
msg.send()
student.save(update_fields=['StartDate','EndDate','subs','confirm','First','qscore'])
return redirect("main:profile")
else:
pass
else:
return redirect("main:profile")
else:
if request.method == 'POST':
form = AuthenticationForm(request, data=request.POST)
if form.is_valid():
username = form.cleaned_data.get('username')
password = form.cleaned_data.get('password')
user = authenticate(username=username,password=password)
if user is not None:
login(request, user)
messages.success(request, f"You are logged in as {username}")
return redirect("main:rates")
else:
messages.error(request, "Invalid username or password")
else:
messages.error(request, "Invalid username or password")
form = AuthenticationForm()
return render(request,"main/login.html",{"form":form})
return redirect("main:rates")
def process_subscription(request):
# subscription_plan = request.session.get('subscription_plan')
host = request.get_host()
# if subscription_plan == '1-month':
# price = "7"
# billing_cycle = 1
# billing_cycle_unit = "M"
# else:
# price = "70"
# billing_cycle = 1
# billing_cycle_unit = "Y"
price = "0.1"
billing_cycle = 1
billing_cycle_unit = "M"
paypal_dict = {
"cmd": "_xclick-subscriptions",
'business': '[email protected]',#[email protected]
"a3": price, # monthly price
"p3": billing_cycle, # duration of each unit (depends on unit)
"t3": billing_cycle_unit, # duration unit ("M for Month")
"src": "1", # make payments recur
"sra": "1", # reattempt payment on payment error
"no_note": "1", # remove extra notes (optional)
'item_name': 'Content subscription',
'custom': 1, # custom data, pass something meaningful here
'currency_code': 'USD',
'notify_url': 'http://{}{}'.format(host,reverse('main:homepage')),
'return_url': 'http://{}{}'.format(host,reverse('main:finish')),
'cancel_return': 'http://{}{}'.format(host,reverse('main:profile')),
}
form = PayPalPaymentsForm(initial=paypal_dict, button_type="subscribe")
return render(request, 'main/process_subscription.html', locals())
@csrf_exempt
def response(request):
resp = VerifyPaytmResponse(request)
if resp['verified']:
# save success details to db; details in resp['paytm']
print('-----------------------------------------')
print(resp)
print('-------------------------------')
print(request.POST)
return render(request, 'main/payment_suc.html')
else:
# check what happened; details in resp['paytm']
return render(request, 'main/payment_faild.html')
@csrf_exempt
def response_subs(request):
resp = VerifyPaytmResponse(request)
if resp['verified']:
# save success details to db; details in resp['paytm']
return render(request, 'main/payment_suc2.html')
else:
# check what happened; details in resp['paytm']
return render(request, 'main/payment_faild.html')
def namedit(request):
fname = request.POST.get('FName')
lname = request.POST.get('LName')
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
student.FirstName = fname
student.SecondName = lname
student.save(update_fields=['FirstName','SecondName'])
profile(request)
return redirect("main:profile")
def emailedit(request):
email = request.POST.get('Email')
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
student.Email = email
student.save(update_fields=['Email'])
return redirect("main:profile")
def countryedit(request):
country = request.POST.get('country')
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
student.Country = country
student.save(update_fields=['Country'])
return redirect("main:profile")
def playlistedit(request):
country = request.POST.get('playlist')
if request.user.is_anonymous != True:
if country.find("open.spotify.com") != -1:
request.user.student.playlist = country.replace("playlist", "embed/playlist")
request.user.student.save(update_fields=['playlist'])
else:
pass
else:
return redirect("main:homepage")
return redirect("main:profile")
def ppedit(request):
file = request.FILES['pp']
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
print(file)
student.ProfilePicture = file
student.save(update_fields=['ProfilePicture'])
return redirect("main:profile")
def math(request):
notes = {}
nlist = []
for n in Note.objects.all():
if n.book_field == 'math':
nlist.append(n)
paginator = Paginator(nlist,6)
page = request.GET.get('page')
nlist = paginator.get_page(page)
notes = {
'notes':nlist,
'field':'Math'
}
return render(request,"main/notes.html",notes)
def language(request):
notes = {}
nlist = []
for n in Note.objects.all():
if n.book_field == 'language':
nlist.append(n)
paginator = Paginator(nlist,6)
page = request.GET.get('page')
nlist = paginator.get_page(page)
notes = {
'notes':nlist,
'field':'Language'
}
return render(request,"main/notes.html",notes)
def business(request):
notes = {}
nlist = []
for n in Note.objects.all():
if n.book_field == 'business':
nlist.append(n)
paginator = Paginator(nlist,6)
page = request.GET.get('page')
nlist = paginator.get_page(page)
notes = {
'notes':nlist,
'field':'Commerce'
}
return render(request,"main/notes.html",notes)
def science(request):
notes = {}
nlist = []
for n in Note.objects.all():
if n.book_field == 'science and engineering':
nlist.append(n)
paginator = Paginator(nlist,6)
page = request.GET.get('page')
nlist = paginator.get_page(page)
notes = {
'notes':nlist,
'field':'Sciences and Engineering'
}
return render(request,"main/notes.html",notes)
def socialscience(request):
notes = {}
nlist = []
for n in Note.objects.all():
if n.book_field == 'social science and history':
nlist.append(n)
paginator = Paginator(nlist,6)
page = request.GET.get('page')
nlist = paginator.get_page(page)
notes = {
'notes':nlist,
'field':'Social Science and History'
}
return render(request,"main/notes.html",notes)
def computerscience(request):
notes = {}
nlist = []
for n in Note.objects.all():
if n.book_field == 'computer science':
nlist.append(n)
paginator = Paginator(nlist,6)
page = request.GET.get('page')
nlist = paginator.get_page(page)
notes = {
'notes':nlist,
'field':'Computer Science'
}
return render(request,"main/notes.html",notes)
def other(request):
notes = {}
nlist = []
for n in Note.objects.all():
if n.book_field == 'other':
nlist.append(n)
paginator = Paginator(nlist,6)
page = request.GET.get('page')
nlist = paginator.get_page(page)
notes = {
'notes':nlist,
'field':'Other'
}
return render(request,"main/notes.html",notes)
def notesearch_request(request):
books = []
qlist = []
search_r = re.sub(' +', ' ',request.GET.get('search')).rstrip().lstrip()
empty = (search_r == '')
if empty:
return redirect("main:notes")
else:
for book in Note.objects.all():
match1 = lev.ratio(search_r,book.File_name) #Title
match2 = lev.ratio(search_r,book.Description) #ISBN
match3 = lev.ratio(search_r,book.Auth) #Author
match = maximum(match1, match2, match3)
cord = (book,match*100)
books.append(cord)
sortlist = Sort_Tuple(books)
for s in range(len(sortlist)):
qlist.append(sortlist[s][0])
# for y in search_r.split():
# if re.search(y.lower(), sortlist[s][0].Description.lower()):
# break
# else:
# pass
paginator = Paginator(qlist,8)
page = request.GET.get('page')
qlist = paginator.get_page(page)
q = {
'question': search_r,
'qs':qlist,
}
return render(request,"main/note_search.html",q)
def noteview_request(request):
sub_ceck(request)
checkbook(request)
search_r = request.GET.get('notes')
print(search_r)
q = {
'notes': '',
}
books = Note.objects.all()
if request.user.is_authenticated:
students = Student.objects.all()
for student in students:
if (student.Email == request.user.email):
if student.subs:
for book in books:
if(book.File == search_r):
print('---------------------')
q = {
'notes': book,
}
break
else:
q = {
'notes': None,
}
return render(request,"main/note_view.html",q)
| [
"[email protected]"
] | |
8283b7f68b360f419fd5896d84b4879f9089d354 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /kfwTnnJjo3SKG2pYx_3.py | e7ac90f207041dbf5f864dab33c4a2d7ea29af06 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,576 | py | """
Replace the numbers in a string with their binary form.
### Examples
replace_nums("I have 2 sheep.") ➞ "I have 10 sheep."
replace_nums("My father was born in 1974.10.25.") ➞ "My father was born in 11110110110.1010.11001."
replace_nums("10hell76o4 boi") ➞ "1010hell1001100o100 boi"
### Notes
* There are possibly two or more numbers in a single word (I do not recommend splitting the text at spaces, it surely won't help).
* Anything separates two numbers, even spaces ("2 2" --> "10 10").
"""
def replace_nums(string):
binary = lambda num: int(bin(num)[2:])  # binary digits of num, kept as an int
def find_nums(string):
digits = '0123456789'
numbers = []
num = ''
for n in range(len(string)):
if string[n] in digits:
num += string[n]
else:
if num != '':
numbers.append(int(num))
num = ''
if num != '':
numbers.append(int(num))
num = ''
return numbers
def sort(list):
# emit longer numbers first so a multi-digit number is replaced before any
# shorter number that appears inside it (e.g. 1974 before 10)
dict = {}
for int in list:
l = len(str(int))
if l not in dict.keys():
dict[l] = [int]
else:
dict[l].append(int)
nl = []
for key in reversed(sorted(dict.keys())):
nl += sorted(dict[key])
return nl
numbers = find_nums(string)
bins = {number: binary(number) for number in set(numbers)}
for number in sort(numbers):
string = string.replace(str(number), str(bins[number]))
# hard-coded patch: replacing '10' with '1010' also corrupts the '10's inside
# an already-substituted binary string (1974 -> 11110110110), so the corrupted
# form is swapped back here
if '11110101101011010' in string:
string = string.replace('11110101101011010', '11110110110')
return string
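# quick sanity check against the examples in the docstring above
if __name__ == '__main__':
  print(replace_nums("I have 2 sheep."))                    # I have 10 sheep.
  print(replace_nums("My father was born in 1974.10.25."))  # My father was born in 11110110110.1010.11001.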
| [
"[email protected]"
] | |
b4822ca44563f6be4ba21dc3c949c510cffc59b8 | bc167f434158921bcf2c678155c5cdfec1c9b0c9 | /PI_code/simulator/behaviourGeneration/firstGenScripts_preyHunter/behav143.py | 69e2c39d1de031eb531fff7484cfaf7bc99323d5 | [] | no_license | s0217391/DifferentProjects | 6450efc89c64ecd21b86c705737e89e5c69433a6 | 7f4da153660817b6cbf72d2e823aa29c0c2f95a9 | refs/heads/master | 2021-01-17T02:58:46.219240 | 2015-05-26T22:45:46 | 2015-05-26T22:45:46 | 34,995,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | #!/usr/bin/python
import sys
def compute(prey):
if prey[0] != 0:
temp0 = prey[1] % prey[0]
else:
temp0 = prey[0]
temp1 = temp0 * prey[0]
temp1 = prey[1] + prey[0]
temp2 = max(temp1, prey[0])
temp3 = temp1 + temp1
if prey[1] > prey[0]:
temp4 = temp0 + prey[1]
else:
temp4 = temp1 + prey[0]
temp1 = temp4 * temp2
temp5 = temp1 * temp0
temp6 = min(temp4, prey[0])
if temp5 > temp4:
temp6 = -1 * temp6
else:
if temp2 > temp4:
temp6 = prey[0] + temp4
else:
temp6 = max(temp6, temp5)
if temp2 > prey[0]:
temp2 = temp6 * prey[1]
else:
temp2 = temp1 * temp4
temp3 = temp0 * prey[0]
temp3 = min(temp0, temp6)
temp0 = -1 * temp1
temp4 = temp0 * temp0
temp3 = max(temp6, temp2)
return [prey[1], temp4]
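# worked example, traced by hand through the branches above:
# compute([3, 5]) -> [5, 3136]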
| [
"[email protected]"
] | |
9308ae4ebcf181f0896ffd022dd31f4ecff6f8df | f4c753c85b23014faa43f905aef817e8d493e187 | /core/fill/fill_types.py | c35def3435f65e21e0c6c7c44920c1335e35f5ec | [
"MIT"
] | permissive | bdrydyk/building_tool | 61cbfe76af7b4af56ea714670609961efd809385 | 8da3d17d74591a556d597a4f360d3730d1ae4c1a | refs/heads/master | 2020-06-25T16:56:58.726059 | 2019-07-21T17:51:26 | 2019-07-21T17:51:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,067 | py | import bmesh
from mathutils import Vector, Matrix
from bmesh.types import BMEdge, BMVert
from ...utils import (
filter_geom,
calc_edge_median,
calc_face_dimensions,
filter_vertical_edges,
filter_horizontal_edges,
)
def fill_panel(bm, face, prop):
"""Create panels on face
"""
if prop.panel_count_x + prop.panel_count_y == 0:
return
bmesh.ops.inset_individual(bm, faces=[face], thickness=prop.panel_border_size)
quads = subdivide_face_into_quads(bm, face, prop.panel_count_x, prop.panel_count_y)
bmesh.ops.inset_individual(bm, faces=quads, thickness=prop.panel_margin / 2)
bmesh.ops.translate(
bm,
verts=list({v for f in quads for v in f.verts}),
vec=face.normal * prop.panel_depth,
)
def fill_glass_panes(bm, face, prop):
"""Create glass panes on face
"""
if prop.pane_count_x + prop.pane_count_y == 0:
return
quads = subdivide_face_into_quads(bm, face, prop.pane_count_x, prop.pane_count_y)
bmesh.ops.inset_individual(bm, faces=quads, thickness=prop.pane_margin)
for f in quads:
bmesh.ops.translate(bm, verts=f.verts, vec=-f.normal * prop.pane_depth)
def fill_bar(bm, face, prop):
"""Create horizontal and vertical bars along a face
"""
width, height = calc_face_dimensions(face)
face_center = face.calc_center_median()
# -- horizontal
offset = height / (prop.bar_count_x + 1)
for i in range(prop.bar_count_x):
scale = (1, 1, prop.bar_width / height)
position = Vector((face.normal * prop.bar_depth / 2)) + Vector(
(0, 0, -height / 2 + (i + 1) * offset)
)
depth = -face.normal * prop.bar_depth / 2
create_bar_from_face(bm, face, face_center, position, scale, depth)
# -- vertical
eps = 0.015
offset = width / (prop.bar_count_y + 1)
for i in range(prop.bar_count_y):
scale = (prop.bar_width / width, prop.bar_width / width, 1)
perp = face.normal.cross(Vector((0, 0, 1)))
position = Vector((face.normal * ((prop.bar_depth / 2) - eps))) + perp * (
-width / 2 + ((i + 1) * offset)
)
depth = -face.normal * ((prop.bar_depth / 2) - eps)
create_bar_from_face(bm, face, face_center, position, scale, depth, True)
def fill_louver(bm, face, prop):
"""Create louvers from face
"""
normal = face.normal
if prop.louver_margin:
bmesh.ops.inset_individual(bm, faces=[face], thickness=prop.louver_margin)
segments = double_and_make_even(prop.louver_count)
faces = subdivide_face_into_vertical_segments(bm, face, segments)
faces.sort(key=lambda f: f.calc_center_median().z)
louver_faces = faces[1::2]
# -- scale to border
for face in louver_faces:
bmesh.ops.scale(
bm,
vec=(1, 1, 1 + prop.louver_border),
verts=face.verts,
space=Matrix.Translation(-face.calc_center_median()),
)
extrude_faces_add_slope(bm, louver_faces, normal, prop.louver_depth)
def subdivide_face_into_quads(bm, face, cuts_x, cuts_y):
"""subdivide a face(quad) into more quads
"""
v_edges = filter_vertical_edges(face.edges, face.normal)
h_edges = filter_horizontal_edges(face.edges, face.normal)
edges = []
if cuts_x > 0:
res = bmesh.ops.subdivide_edges(bm, edges=v_edges, cuts=cuts_x).get(
"geom_inner"
)
edges.extend(filter_geom(res, BMEdge))
if cuts_y > 0:
res = bmesh.ops.subdivide_edges(bm, edges=h_edges + edges, cuts=cuts_y).get(
"geom_inner"
)
edges.extend(filter_geom(res, BMEdge))
bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=0.01)
return list({f for ed in edges for f in ed.link_faces})
def duplicate_face_translate_scale(bm, face, position, scale, scale_center):
"""Duplicate a face and transform it
"""
ret = bmesh.ops.duplicate(bm, geom=[face])
verts = filter_geom(ret["geom"], BMVert)
bmesh.ops.scale(bm, verts=verts, vec=scale, space=Matrix.Translation(-scale_center))
bmesh.ops.translate(bm, verts=verts, vec=position)
return ret
def extrude_edges_to_depth(bm, edges, depth):
"""Extrude edges only and translate
"""
ext = bmesh.ops.extrude_edge_only(bm, edges=edges)
bmesh.ops.translate(bm, verts=filter_geom(ext["geom"], BMVert), vec=depth)
def extrude_faces_add_slope(bm, faces, extrude_normal, extrude_depth):
"""Extrude faces and move top edge back to form a wedge
"""
res = bmesh.ops.extrude_discrete_faces(bm, faces=faces)
bmesh.ops.translate(
bm,
vec=extrude_normal * extrude_depth,
verts=list({v for face in res["faces"] for v in face.verts}),
)
for face in res["faces"]:
top_edge = max(
filter_horizontal_edges(face.edges, face.normal),
key=lambda e: calc_edge_median(e).z,
)
bmesh.ops.translate(bm, vec=-face.normal * extrude_depth, verts=top_edge.verts)
bmesh.ops.remove_doubles(bm, verts=bm.verts, dist=0.01)
def subdivide_face_into_vertical_segments(bm, face, segments):
"""Cut a face(quad) vertically into multiple faces
"""
res = bmesh.ops.subdivide_edges(
bm, edges=filter_vertical_edges(face.edges, face.normal), cuts=segments
).get("geom_inner")
return list({f for e in filter_geom(res, BMEdge) for f in e.link_faces})
def double_and_make_even(value):
"""multiply a number by 2 and make it even
"""
double = value * 2
return double if double % 2 == 0 else double + 1
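# e.g. double_and_make_even(3) -> 6 and double_and_make_even(2.5) -> 6.0;
# an integer input doubles to an even number, so the +1 branch only fires
# for odd doubles produced by non-integer input.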
def create_bar_from_face(bm, face, median, position, scale, depth, vertical=False):
"""Create bar geometry from a face
"""
duplicate = duplicate_face_translate_scale(bm, face, position, scale, median).get(
"geom"
)
if vertical:
edges = filter_vertical_edges(filter_geom(duplicate, BMEdge), face.normal)
else:
edges = filter_horizontal_edges(filter_geom(duplicate, BMEdge), face.normal)
extrude_edges_to_depth(bm, edges, depth)
| [
"[email protected]"
] | |
6fe5f23bd91afed7a5a5a947bc2ecfc947a7495a | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/pfnet_chainer/chainer-master/examples/ptb/gentxt.py | c034207b2ef2a5e688211a486a4d0ed0e00d2277 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 3,083 | py | #!/usr/bin/env python
"""Example to generate text from a recurrent neural network language model.
This code is ported from following implementation.
https://github.com/longjie/chainer-char-rnn/blob/master/sample.py
"""
import argparse
import sys
import numpy as np
import six
import chainer
from chainer import cuda
import chainer.functions as F
import chainer.links as L
from chainer import serializers
import train_ptb
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--model', '-m', type=str, required=True,
help='model data, saved by train_ptb.py')
parser.add_argument('--primetext', '-p', type=str, required=True,
default='',
help='base text data, used for text generation')
parser.add_argument('--seed', '-s', type=int, default=123,
                        help='random seed for text generation')
parser.add_argument('--unit', '-u', type=int, default=650,
help='number of units')
parser.add_argument('--sample', type=int, default=1,
help='negative value indicates NOT use random choice')
parser.add_argument('--length', type=int, default=20,
help='length of the generated text')
parser.add_argument('--gpu', type=int, default=-1,
help='GPU ID (negative value indicates CPU)')
args = parser.parse_args()
np.random.seed(args.seed)
xp = cuda.cupy if args.gpu >= 0 else np
# load vocabulary
vocab = chainer.datasets.get_ptb_words_vocabulary()
ivocab = {}
for c, i in vocab.items():
ivocab[i] = c
# should be same as n_units , described in train_ptb.py
n_units = args.unit
lm = train_ptb.RNNForLM(len(vocab), n_units, train=False)
model = L.Classifier(lm)
serializers.load_npz(args.model, model)
if args.gpu >= 0:
cuda.get_device(args.gpu).use()
model.to_gpu()
model.predictor.reset_state()
primetext = args.primetext
if isinstance(primetext, six.binary_type):
primetext = primetext.decode('utf-8')
if primetext in vocab:
prev_word = chainer.Variable(xp.array([vocab[primetext]], xp.int32))
else:
print('ERROR: Unfortunately ' + primetext + ' is unknown.')
exit()
sys.stdout.write(primetext + ' ')
for i in six.moves.range(args.length):
prob = F.softmax(model.predictor(prev_word))
if args.sample > 0:
probability = cuda.to_cpu(prob.data)[0].astype(np.float64)
probability /= np.sum(probability)
index = np.random.choice(range(len(probability)), p=probability)
else:
index = np.argmax(cuda.to_cpu(prob.data))
if ivocab[index] == '<eos>':
sys.stdout.write('.')
else:
sys.stdout.write(ivocab[index] + ' ')
prev_word = chainer.Variable(xp.array([index], dtype=xp.int32))
sys.stdout.write('\n')
if __name__ == '__main__':
main()
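# Example invocation (assuming a model trained and saved by train_ptb.py):
#   python gentxt.py --model model.npz --primetext the --length 10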
| [
"[email protected]"
] | |
111e184e4618fc600d7bfced3ff6d93feb9e5217 | 975da64ee000833af92148b647ea1bea1deac6fe | /test/test_replace_phone_number_params.py | 188e89fc12c3deeca09f0ee74c12b0cc356e1bbf | [] | no_license | bitlayergit/API-SDK-python | 41c84eadae31556fae818e636565723d3112aa2c | 111b4eb9b725d7a8feb31b8e0af8bb2ee79dcda3 | refs/heads/master | 2021-07-06T08:51:56.984499 | 2017-09-28T15:37:20 | 2017-09-28T15:37:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 987 | py | # coding: utf-8
"""
Phone.com API
This is a Phone.com api Swagger definition
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.replace_phone_number_params import ReplacePhoneNumberParams
class TestReplacePhoneNumberParams(unittest.TestCase):
""" ReplacePhoneNumberParams unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testReplacePhoneNumberParams(self):
"""
Test ReplacePhoneNumberParams
"""
# FIXME: construct object with mandatory attributes with example values
#model = swagger_client.models.replace_phone_number_params.ReplacePhoneNumberParams()
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
5ae8e4f6bff9c4bb0c4b6d58c9158e149983f7ed | 372edad1cd6399cadba82818e9fb9682c3bac1b4 | /packages/python/plotly/plotly/validators/_histogram2d.py | b2bbad238bb208885da6ffff8f69637c6d5e0789 | [
"MIT"
] | permissive | OGVGdev/plotly.py | 78bfa9e25e92c367f0da30af7885cdd163ba612b | 96a9101c79aa588023f56153bf274d0d570ffcf6 | refs/heads/master | 2022-11-10T16:44:06.732450 | 2020-06-26T13:07:06 | 2020-06-26T13:07:06 | 275,173,321 | 1 | 0 | MIT | 2020-06-26T14:19:41 | 2020-06-26T14:19:40 | null | UTF-8 | Python | false | false | 16,417 | py | import _plotly_utils.basevalidators
class Histogram2DValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="histogram2d", parent_name="", **kwargs):
super(Histogram2DValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Histogram2d"),
data_docs=kwargs.pop(
"data_docs",
"""
autobinx
Obsolete: since v1.42 each bin attribute is
auto-determined separately and `autobinx` is
not needed. However, we accept `autobinx: true`
or `false` and will update `xbins` accordingly
before deleting `autobinx` from the trace.
autobiny
Obsolete: since v1.42 each bin attribute is
auto-determined separately and `autobiny` is
not needed. However, we accept `autobiny: true`
or `false` and will update `ybins` accordingly
before deleting `autobiny` from the trace.
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `colorscale`. In case
`colorscale` is unspecified or `autocolorscale`
is true, the default palette will be chosen
according to whether numbers in the `color`
array are all positive, all negative or mixed.
bingroup
Set the `xbingroup` and `ybingroup` default
prefix For example, setting a `bingroup` of 1
on two histogram2d traces will make them their
x-bins and y-bins match separately.
coloraxis
Sets a reference to a shared color axis.
References to these shared color axes are
"coloraxis", "coloraxis2", "coloraxis3", etc.
Settings for these shared color axes are set in
the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple
color scales can be linked to the same color
axis.
colorbar
:class:`plotly.graph_objects.histogram2d.ColorB
ar` instance or dict with compatible properties
colorscale
Sets the colorscale. The colorscale must be an
array containing arrays mapping a normalized
value to an rgb, rgba, hex, hsl, hsv, or named
color string. At minimum, a mapping for the
lowest (0) and highest (1) values are required.
For example, `[[0, 'rgb(0,0,255)'], [1,
'rgb(255,0,0)']]`. To control the bounds of the
colorscale in color space, use`zmin` and
`zmax`. Alternatively, `colorscale` may be a
palette name string of the following list: Grey
s,YlGnBu,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,
Picnic,Rainbow,Portland,Jet,Hot,Blackbody,Earth
,Electric,Viridis,Cividis.
customdata
            Assigns extra data to each datum. This may be
            useful when listening to hover, click and
            selection events. Note that "scatter" traces
            also append customdata items in the markers
            DOM elements
customdatasrc
Sets the source reference on Chart Studio Cloud
for customdata .
histfunc
Specifies the binning function used for this
histogram trace. If "count", the histogram
values are computed by counting the number of
values lying inside each bin. If "sum", "avg",
"min", "max", the histogram values are computed
using the sum, the average, the minimum or the
maximum of the values lying inside each bin
respectively.
histnorm
Specifies the type of normalization used for
this histogram trace. If "", the span of each
bar corresponds to the number of occurrences
(i.e. the number of data points lying inside
the bins). If "percent" / "probability", the
span of each bar corresponds to the percentage
/ fraction of occurrences with respect to the
total number of sample points (here, the sum of
all bin HEIGHTS equals 100% / 1). If "density",
the span of each bar corresponds to the number
of occurrences in a bin divided by the size of
the bin interval (here, the sum of all bin
AREAS equals the total number of sample
points). If *probability density*, the area of
each bar corresponds to the probability that an
event will fall into the corresponding bin
(here, the sum of all bin AREAS equals 1).
hoverinfo
Determines which trace information appear on
hover. If `none` or `skip` are set, no
information is displayed upon hovering. But, if
`none` is set, click and hover events are still
fired.
hoverinfosrc
Sets the source reference on Chart Studio Cloud
for hoverinfo .
hoverlabel
:class:`plotly.graph_objects.histogram2d.Hoverl
abel` instance or dict with compatible
properties
hovertemplate
Template string used for rendering the
information that appear on hover box. Note that
this will override `hoverinfo`. Variables are
inserted using %{variable}, for example "y:
%{y}". Numbers are formatted using d3-format's
syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
for details on the formatting syntax. Dates are
formatted using d3-time-format's syntax
%{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format
for details on the date formatting syntax. The
variables available in `hovertemplate` are the
ones emitted as event data described at this
link https://plotly.com/javascript/plotlyjs-
events/#event-data. Additionally, every
attributes that can be specified per-point (the
ones that are `arrayOk: true`) are available.
variable `z` Anything contained in tag
`<extra>` is displayed in the secondary box,
for example "<extra>{fullData.name}</extra>".
To hide the secondary box completely, use an
empty tag `<extra></extra>`.
hovertemplatesrc
Sets the source reference on Chart Studio Cloud
for hovertemplate .
ids
            Assigns id labels to each datum. These ids are
            used for object constancy of data points during
            animation. Should be an array of strings, not
            numbers or any other type.
idssrc
Sets the source reference on Chart Studio Cloud
for ids .
legendgroup
Sets the legend group for this trace. Traces
part of the same legend group hide/show at the
same time when toggling legend items.
marker
:class:`plotly.graph_objects.histogram2d.Marker
` instance or dict with compatible properties
meta
Assigns extra meta information associated with
this trace that can be used in various text
attributes. Attributes such as trace `name`,
graph, axis and colorbar `title.text`,
annotation `text` `rangeselector`,
`updatemenues` and `sliders` `label` text all
support `meta`. To access the trace `meta`
values in an attribute in the same trace,
simply use `%{meta[i]}` where `i` is the index
or key of the `meta` item in question. To
access trace `meta` in layout attributes, use
`%{data[n[.meta[i]}` where `i` is the index or
key of the `meta` and `n` is the trace index.
metasrc
Sets the source reference on Chart Studio Cloud
for meta .
name
            Sets the trace name. The trace name appears as
the legend item and on hover.
nbinsx
Specifies the maximum number of desired bins.
This value will be used in an algorithm that
will decide the optimal bin size such that the
histogram best visualizes the distribution of
the data. Ignored if `xbins.size` is provided.
nbinsy
Specifies the maximum number of desired bins.
This value will be used in an algorithm that
will decide the optimal bin size such that the
histogram best visualizes the distribution of
the data. Ignored if `ybins.size` is provided.
opacity
Sets the opacity of the trace.
reversescale
Reverses the color mapping if true. If true,
`zmin` will correspond to the last color in the
array and `zmax` will correspond to the first
color.
showlegend
Determines whether or not an item corresponding
to this trace is shown in the legend.
showscale
Determines whether or not a colorbar is
displayed for this trace.
stream
:class:`plotly.graph_objects.histogram2d.Stream
` instance or dict with compatible properties
uid
Assign an id to this trace, Use this to provide
object constancy between traces during
animations and transitions.
uirevision
Controls persistence of some user-driven
changes to the trace: `constraintrange` in
`parcoords` traces, as well as some `editable:
true` modifications such as `name` and
`colorbar.title`. Defaults to
`layout.uirevision`. Note that other user-
driven trace attribute changes are controlled
by `layout` attributes: `trace.visible` is
controlled by `layout.legend.uirevision`,
`selectedpoints` is controlled by
`layout.selectionrevision`, and
`colorbar.(x|y)` (accessible with `config:
{editable: true}`) is controlled by
`layout.editrevision`. Trace changes are
tracked by `uid`, which only falls back on
trace index if no `uid` is provided. So if your
app can add/remove traces before the end of the
`data` array, such that the same trace has a
different index, you can still preserve user-
driven changes if you give each trace a `uid`
that stays with it as it moves.
visible
Determines whether or not this trace is
visible. If "legendonly", the trace is not
drawn, but can appear as a legend item
(provided that the legend itself is visible).
x
Sets the sample data to be binned on the x
axis.
xaxis
Sets a reference between this trace's x
coordinates and a 2D cartesian x axis. If "x"
(the default value), the x coordinates refer to
`layout.xaxis`. If "x2", the x coordinates
refer to `layout.xaxis2`, and so on.
xbingroup
Set a group of histogram traces which will have
compatible x-bin settings. Using `xbingroup`,
histogram2d and histogram2dcontour traces (on
axes of the same axis type) can have compatible
x-bin settings. Note that the same `xbingroup`
value can be used to set (1D) histogram
`bingroup`
xbins
:class:`plotly.graph_objects.histogram2d.XBins`
instance or dict with compatible properties
xcalendar
Sets the calendar system to use with `x` date
data.
xgap
Sets the horizontal gap (in pixels) between
bricks.
xsrc
Sets the source reference on Chart Studio Cloud
for x .
y
Sets the sample data to be binned on the y
axis.
yaxis
Sets a reference between this trace's y
coordinates and a 2D cartesian y axis. If "y"
(the default value), the y coordinates refer to
`layout.yaxis`. If "y2", the y coordinates
refer to `layout.yaxis2`, and so on.
ybingroup
Set a group of histogram traces which will have
compatible y-bin settings. Using `ybingroup`,
histogram2d and histogram2dcontour traces (on
axes of the same axis type) can have compatible
y-bin settings. Note that the same `ybingroup`
value can be used to set (1D) histogram
`bingroup`
ybins
:class:`plotly.graph_objects.histogram2d.YBins`
instance or dict with compatible properties
ycalendar
Sets the calendar system to use with `y` date
data.
ygap
Sets the vertical gap (in pixels) between
bricks.
ysrc
Sets the source reference on Chart Studio Cloud
for y .
z
Sets the aggregation data.
zauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `z`) or the bounds set in `zmin` and `zmax`
Defaults to `false` when `zmin` and `zmax` are
set by the user.
zhoverformat
Sets the hover text formatting rule using d3
formatting mini-languages which are very
similar to those in Python. See:
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format
zmax
Sets the upper bound of the color domain. Value
should have the same units as in `z` and if
set, `zmin` must be set as well.
zmid
Sets the mid-point of the color domain by
scaling `zmin` and/or `zmax` to be equidistant
to this point. Value should have the same units
as in `z`. Has no effect when `zauto` is
`false`.
zmin
Sets the lower bound of the color domain. Value
should have the same units as in `z` and if
set, `zmax` must be set as well.
zsmooth
Picks a smoothing algorithm use to smooth `z`
data.
zsrc
Sets the source reference on Chart Studio Cloud
for z .
""",
),
**kwargs
)
| [
"[email protected]"
] | |
148d097216699d2a68a987361655c5f84bc8326b | 248f56b1fb0cb20796e5a29736a37fa4106fa0bb | /2022KAKAOBLIND/solution3.py | ed3658e9ce984ff7a5c5eca0daef151660b3f2a1 | [
"MIT"
] | permissive | KOOKDONGHUN/programmers | b24d672c67180765daca3036c72600ba17e0bd50 | 87ac8fcc23b14b3293c37933e4b9bbf663792830 | refs/heads/main | 2023-08-15T21:44:04.462043 | 2021-10-02T05:35:23 | 2021-10-02T05:35:23 | 401,706,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,740 | py | from datetime import datetime
import math
def solution(fees, records):
    t_ls = []
    car_num_ls = []
    check_ls = []
    car_num_ls2 = []
dead_line = "23:59"
log = dict()
for record in records:
t, car_num, check = record.split()
t_ls.append(t)
car_num_ls.append(car_num)
check_ls.append(check)
if (car_num not in car_num_ls2):
car_num_ls2.append(car_num)
for num in car_num_ls:
log[f'{num}'] = []
for idx in range(len(car_num_ls)):
log[f'{car_num_ls[idx]}'].append(t_ls[idx])
# print(log)
time_log = dict()
for key, value in log.items():
time_log[f'{key}'] = []
l = len(value)
if (l % 2 == 1):
st = ''
for i in range(l-1):
if (i % 2 == 1): # out
a = datetime.strptime(st, '%H:%M')
b = datetime.strptime(value[i], '%H:%M')
c = (b - a).seconds/60
time_log[f'{key}'].append(c)
else: # in
st = value[i]
            # the last IN has no matching OUT: charge up to the closing time
a = datetime.strptime(value[-1], '%H:%M')
b = datetime.strptime(dead_line, '%H:%M')
c = (b - a).seconds / 60
time_log[f'{key}'].append(c)
else:
st = ''
for i in range(l):
if (i % 2 == 1): # out
a = datetime.strptime(st, '%H:%M')
b = datetime.strptime(value[i], '%H:%M')
c = (b - a).seconds / 60
time_log[f'{key}'].append(c)
else: # in
st = value[i]
# print(time_log)
answer2 = dict()
for key, value in time_log.items():
answer2[f'{key}'] = 0
if (sum(value) <= fees[0]):
answer2[f'{key}'] = fees[1]
else:
r1 = int(sum(value)) - fees[0]
r2 = math.ceil(r1 / fees[2])
r3 = r2 * fees[3]
rr = fees[1] + r3
answer2[f'{key}'] = rr
answer = []
car_num_ls2.sort()
for i in car_num_ls2:
answer.append(answer2[i])
# print(answer)
return answer
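# Worked example (first test case below): car "0000" parks 34 + 300 = 334
# minutes in total, so its fee is
# 5000 + math.ceil((334 - 180) / 10) * 600 = 14600.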
if __name__ == "__main__":
# execute only if run as a script
solution([180, 5000, 10, 600], ["05:34 5961 IN", "06:00 0000 IN", "06:34 0000 OUT", "07:59 5961 OUT", "07:59 0148 IN", "18:59 0000 IN", "19:09 0148 OUT", "22:59 5961 IN", "23:00 5961 OUT"])
print('-'*38)
solution([120, 0, 60, 591], ["16:00 3961 IN","16:00 0202 IN","18:00 3961 OUT","18:00 0202 OUT","23:58 3961 IN"])
print('-' * 38)
solution([1, 461, 1, 10], ["00:00 1234 IN"]) | [
"[email protected]"
] | |
c43a49ab156f7382d0fb726caf88d66387bbe7c9 | 184310f55b58e854dc3b6c58599ef99bc4c95739 | /hujian_api/API_service/Templight/tt.py | f8dec3403639f1cdd677b97b36c29b6ed379a8ac | [] | no_license | tanjijun1/Python_API | c8585821a627c399fea1ab31bb024be6b82dd3ab | 3c4771875870ffe425d2d39fc28a50449b1752f2 | refs/heads/master | 2023-01-07T23:30:30.284433 | 2020-11-11T08:43:10 | 2020-11-11T08:43:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,203 | py | import time
now = time.time()
print(now)
Content-Type: multipart/form-data; boundary=-----------13418483933
-----------13418483933
Content-Disposition: form-data; name="service_key"
-----------13418483933
Content-Disposition: form-data; name="device_sn"
-----------13418483933
Content-Disposition: form-data; name="service_code"
-----------13418483933
import sys
reload(sys)
sys.path.append('./python2.7/site-packages')
sys.path.append('./python2.7/site-packages/requests_toolbelt-0.8.0-py2.7.egg')
print
sys.path
import urllib2
import urllib
import cookielib
import json
import httplib
import re
import requests
import random
from requests_toolbelt import MultipartEncoder
if len(sys.argv) != 7:
    print sys.argv[0] + ' ' + 'deploy_name' + ' ' + 'apk_name' + ' ' + 'promptInfo' + ' ' + 'versionDesc' + ' ' + 'versionLargeNumber' + ' ' + 'applications.id'
sys.exit()
deploy_name = sys.argv[1]
apk_name = sys.argv[2]
promptInfo = sys.argv[3]
versionDesc = sys.argv[4]
versionLargeNumber = sys.argv[5]
applications = sys.argv[6]
j = 10
# sample(seq, n) picks n random, independent elements from seq
id = ''.join(str(i) for i in random.sample(range(0, 11), j))
print id
s = requests.session()
print s.headers
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Host': '10.4.160.88:8080',
'Referer': 'http://10.4.160.88:8080/nqsky-meap-manager/index',
}
login_url = 'http://10.4.160.88:8080/nqsky-meap-manager/login'
data = {'csrf': '', '_csrf_header': '', 'userName': 'admin', 'password': '1'}
# data = urllib.urlencode(data)
response = s.post(login_url, data=data, headers=headers)
# print response
# print response.status_code
# print response.content
url = 'http://10.4.160.88:8080/nqsky-meap-manager/main/applications/applications/list'
r = s.get(url, headers=headers)
r = r.text
# print r
# headers = {
#
# 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:54.0) Gecko/20100101 Firefox/54.0',
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Refer':'http://10.4.160.88:8080/nqsky-meap-manager/main/applications/appInfo/list/0/8a8420d85f809d23015f93fa25590d8c',
# 'Content-Type': "multipart/form-data; 'boundary=---------------------------%s" %(id)
# }
print
headers
uploadurl = 'http://10.4.160.88:8080/nqsky-meap-manager/main/applications/appVersion/save'
# data={'versionName':'w213','deviceType':'1','status':'1','versionSystem':'1.0','platformType':'1','promptInfo':'publish','versionDesc':'','appUrl':'','versionLargeNumber':'1.1','versionLargeFile':'checkping.pl','largeFile':'','enforceStatus':'1','snapshotImg':'','snapshotFile':'','snapshotName':'','versionType':'','applications.id':'8a8420d85f809d23015f93fa25590d8c','id':'','auditStatus':'0','appOrder':'1','isPortal':'','deviceAuthority':'','technologyType':'3'}
arr1 = ['', '', '', '', '', '']
jsonstr = json.dumps(arr1)
m = MultipartEncoder(
fields={
"versionName": (None, deploy_name),
"deviceType": (None, "1"),
"status": (None, "1"),
"versionSystem": (None, "1.5"),
"platformType": (None, "1"),
"promptInfo": (None, promptInfo),
"versionDesc": (None, versionDesc),
"versionLargeNumber": (None, versionLargeNumber),
"versionLargeFile": "apk_name",
"largeFile": (apk_name, open(apk_name, 'rb'), 'application/octet-stream'),
"enforceStatus": (None, "1"),
"applications.id": (None, applications),
"auditStatus": (None, "0"),
"appOrder": (None, "2"),
"technologyType": (None, "3"),
"snapshotImg": (None, jsonstr),
"snapshotFile": (None, jsonstr),
"snapshotName": (None, jsonstr)
}
)
print m
response = s.post(uploadurl, data=m, headers={'Content-Type': m.content_type})
print '------------------------------------------------------'
print response
print response.url
print response.status_code
# print response.content
if response.status_code == 200:
    print 'deploy success'
else:
    print 'deploy failed'
| [
"[email protected]"
] | |
b5892daa2cf40600c3c171d16b4d33890e637c5d | 3e381dc0a265afd955e23c85dce1e79e2b1c5549 | /hi-A3/not_hesaplama.py | 57a517a822a6edc22adf30f92c4369c4d4857351 | [] | no_license | serkancam/byfp2-2020-2021 | 3addeb92a3ff5616cd6dbd3ae7b2673e1a1a1a5e | c67206bf5506239d967c3b1ba75f9e08fdbad162 | refs/heads/master | 2023-05-05T04:36:21.525621 | 2021-05-29T11:56:27 | 2021-05-29T11:56:27 | 322,643,962 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 773 | py | dosya = open(file="notlar.txt",mode="r",encoding="utf-8")
metin = dosya.read()
dosya.close()
donem_ortalamasi=0.0
not_adet=0
for satir in metin.split("\n"):
ders_adi=satir.split(":")[0].strip()
notlar = satir.split(":")[1].strip().split(" ")
ortalama=0.0
for nt in notlar:
ortalama = ortalama + float(nt)
donem_ortalamasi=donem_ortalamasi+float(nt)
not_adet=not_adet+1
ortalama = ortalama / len(notlar)
print(ders_adi,"ortalamasi=",round(ortalama,2))
donem_ortalamasi = donem_ortalamasi/not_adet
print("dönem ortalaması=",round(donem_ortalamasi,2))
"""
Matematik ortalaması= 75
Türkçe ortalaması=87.5
Sosyal bilgiler ortalaması=95
Fen bilimleri ortalaması=97.5
İngilizce ortalaması=82.5
Dönem ortalaması=?
""" | [
"[email protected]"
] | |
56364384fadab6ba2a8360d6af96ab70e100b200 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_006/ch3_2020_03_09_20_04_20_580816.py | d72a113c6ad8ecb6326c442c0399dc5570cc0f35 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | def calcula_gaussiana(x, mi, sigma):
resultado=(1/(sigma*((2*math.pi)**0.5)))*(math.e**(-0.5(((x-mi)/sigma)**2))
return resultado | [
"[email protected]"
] | |
1b7ee83080c836f6c279d09bbbe341b44e72147b | 8dcd3ee098b4f5b80879c37a62292f42f6b2ae17 | /venv/Lib/site-packages/win32/Demos/CopyFileEx.py | 4aa24f877a44e08b33f9917690f24b5da1da55db | [] | no_license | GregVargas1999/InfinityAreaInfo | 53fdfefc11c4af8f5d2b8f511f7461d11a3f7533 | 2e4a7c6a2424514ca0ec58c9153eb08dc8e09a4a | refs/heads/master | 2022-12-01T20:26:05.388878 | 2020-08-11T18:37:05 | 2020-08-11T18:37:05 | 286,821,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,164 | py | import os
import win32api
import win32file
def ProgressRoutine(TotalFileSize, TotalBytesTransferred, StreamSize, StreamBytesTransferred,
StreamNumber, CallbackReason, SourceFile, DestinationFile, Data):
print(Data)
print(TotalFileSize, TotalBytesTransferred, StreamSize, StreamBytesTransferred, StreamNumber, CallbackReason,
SourceFile, DestinationFile)
##if TotalBytesTransferred > 100000:
## return win32file.PROGRESS_STOP
return win32file.PROGRESS_CONTINUE
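# Note: returning win32file.PROGRESS_STOP from the callback (see the
# commented-out size check above) would abort the copy mid-transfer.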
temp_dir = win32api.GetTempPath()
fsrc = win32api.GetTempFileName(temp_dir, 'cfe')[0]
fdst = win32api.GetTempFileName(temp_dir, 'cfe')[0]
print(fsrc, fdst)
f = open(fsrc, 'w')
f.write('xxxxxxxxxxxxxxxx\n' * 32768)
f.close()
## add a couple of extra data streams
f = open(fsrc + ':stream_y', 'w')
f.write('yyyyyyyyyyyyyyyy\n' * 32768)
f.close()
f = open(fsrc + ':stream_z', 'w')
f.write('zzzzzzzzzzzzzzzz\n' * 32768)
f.close()
operation_desc = 'Copying ' + fsrc + ' to ' + fdst
win32file.CopyFileEx(fsrc, fdst, ProgressRoutine, Data=operation_desc, Cancel=False,
CopyFlags=win32file.COPY_FILE_RESTARTABLE, Transaction=None)
| [
"[email protected]"
] | |
55fd8f6731ac12dc4c43cec0146a397ec6063002 | 9028516ff0b2d95b8000b9fc4c44c29aa73c926c | /qa/rpc-tests/listtransactions.py | 2aa4910074d08c894f8f572dd90a502a1019280f | [
"MIT"
] | permissive | lycion/TripOne | a9e546eac9ad6179c0b6bd4f868162f70930b6ac | c6ae7d9163ef4095fe0e143d26f3311182551147 | refs/heads/master | 2020-03-28T22:29:06.119551 | 2018-09-18T06:07:06 | 2018-09-18T06:07:06 | 149,236,186 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,913 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2015-2017 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import test_framework.loginit
# Exercise the listtransactions API
import pdb
from test_framework.test_framework import TriponeTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, COIN
from io import BytesIO
def txFromHex(hexstring):
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(hexstring))
tx.deserialize(f)
return tx
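# Usage sketch: parse a raw transaction fetched over RPC, e.g.
#   tx = txFromHex(node.getrawtransaction(txid))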
class ListTransactionsTest(TriponeTestFramework):
def setup_nodes(self):
enable_mocktime()
return start_nodes(4, self.options.tmpdir)
def run_test(self):
self.test_listtransactionsfrom()
self.test_listtransactions()
def test_listtransactionsfrom(self):
# Simple send, 0 to 1:
self.sync_all()
tmp = self.nodes[2].listtransactionsfrom("*", 10000, 0)
curpos = len(tmp)
txid = self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.1)
self.sync_all()
self.nodes[0].generate(1)
self.sync_blocks()
# Basic positive test
tmp = self.nodes[2].listtransactionsfrom("*", 1, curpos)
assert len(tmp) == 1
assert tmp[0]["txid"] == txid
tmp = self.nodes[2].listtransactionsfrom("*", 10, curpos)
assert len(tmp) == 1
# Negative tests
# test beyond end of tx list
tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos + 100)
assert(len(tmp) == 0)
# test bad input values
try:
tmp = self.nodes[2].listtransactionsfrom("*", -1, curpos)
assert 0
except JSONRPCException:
pass
try:
tmp = self.nodes[2].listtransactionsfrom("*", 100, -1)
assert 0
except JSONRPCException:
pass
# test multiple rows
curpos += 1
txidsA = [self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.2), self.nodes[2].sendtoaddress(
self.nodes[3].getnewaddress(), 0.3), self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.4)]
self.sync_all()
self.nodes[0].generate(1)
self.sync_blocks()
tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos)
assert len(tmp) == 3
assert tmp[0]["txid"] == txidsA[0]
assert tmp[1]["txid"] == txidsA[1]
assert tmp[2]["txid"] == txidsA[2]
txidsB = [self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.5), self.nodes[2].sendtoaddress(
self.nodes[3].getnewaddress(), 0.6), self.nodes[2].sendtoaddress(self.nodes[3].getnewaddress(), 0.7)]
tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos)
assert len(tmp) == 6
assert tmp[0]["txid"] == txidsA[0]
assert tmp[1]["txid"] == txidsA[1]
assert tmp[2]["txid"] == txidsA[2]
assert tmp[3]["txid"] == txidsB[0]
assert tmp[4]["txid"] == txidsB[1]
assert tmp[5]["txid"] == txidsB[2]
# test when I advance to the end, I get nothing
curpos += len(tmp)
tmp = self.nodes[2].listtransactionsfrom("*", 100, curpos)
assert tmp == []
def test_listtransactions(self):
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid},
{"category": "send", "account": "", "amount": Decimal("-0.1"), "confirmations": 0})
assert_array_result(self.nodes[1].listtransactions(),
{"txid": txid},
{"category": "receive", "account": "", "amount": Decimal("0.1"), "confirmations": 0})
# mine a block, confirmations should change:
self.nodes[0].generate(1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid},
{"category": "send", "account": "", "amount": Decimal("-0.1"), "confirmations": 1})
assert_array_result(self.nodes[1].listtransactions(),
{"txid": txid},
{"category": "receive", "account": "", "amount": Decimal("0.1"), "confirmations": 1})
# send-to-self:
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid, "category": "send"},
{"amount": Decimal("-0.2")})
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid, "category": "receive"},
{"amount": Decimal("0.2")})
# sendmany from node1: twice to self, twice to node2:
send_to = {self.nodes[0].getnewaddress(): 0.11,
self.nodes[1].getnewaddress(): 0.22,
self.nodes[0].getaccountaddress("from1"): 0.33,
self.nodes[1].getaccountaddress("toself"): 0.44}
txid = self.nodes[1].sendmany("", send_to)
self.sync_all()
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.11")},
{"txid": txid})
assert_array_result(self.nodes[0].listtransactions(),
{"category": "receive", "amount": Decimal("0.11")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.22")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "receive", "amount": Decimal("0.22")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.33")},
{"txid": txid})
assert_array_result(self.nodes[0].listtransactions(),
{"category": "receive", "amount": Decimal("0.33")},
{"txid": txid, "account": "from1"})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.44")},
{"txid": txid, "account": ""})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "receive", "amount": Decimal("0.44")},
{"txid": txid, "account": "toself"})
multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.nodes[1].generate(1)
self.sync_all()
assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
{"category": "receive", "amount": Decimal("0.1")},
{"txid": txid, "account": "watchonly"})
if __name__ == '__main__':
ListTransactionsTest().main()
def Test():
t = ListTransactionsTest()
triponeConf = {
"debug": ["all"],
"blockprioritysize": 2000000 # we don't want any transactions rejected due to insufficient fees...
}
# "--tmpdir=/ramdisk/test", "--nocleanup", "--noshutdown"
t.main([], triponeConf, None)
| [
"[email protected]"
] | |
621333841f77d8f450e89b6da4802214a1f72bd0 | f0066a2eb7b2f92d7c04dc314af6be320724c614 | /nova/tests/unit/virt/libvirt/test_vif.py | 19be56c65db380d64cd184e26a58870aeed718fe | [
"Apache-2.0"
] | permissive | hyphon81/nova-for-gpu-passthrough | 80392ea7462ade8457e77843482387d8f6593797 | 7c164980d7355d8fc40a6b155e31e325191b6a5e | refs/heads/master | 2021-01-20T14:10:38.016142 | 2017-02-10T08:03:45 | 2017-02-10T08:03:45 | 82,746,438 | 0 | 1 | Apache-2.0 | 2020-07-24T00:41:48 | 2017-02-22T01:31:23 | Python | UTF-8 | Python | false | false | 77,088 | py | # Copyright 2012 Nicira, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import os
import fixtures
from lxml import etree
import mock
import os_vif
from os_vif import exception as osv_exception
from os_vif import objects as osv_objects
from oslo_concurrency import processutils
from oslo_config import cfg
import six
from nova import exception
from nova.network import linux_net
from nova.network import model as network_model
from nova import objects
from nova.pci import utils as pci_utils
from nova import test
from nova.tests.unit import matchers
from nova.tests.unit.virt import fakelibosinfo
from nova import utils
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt import host
from nova.virt.libvirt import vif
CONF = cfg.CONF
class LibvirtVifTestCase(test.NoDBTestCase):
gateway_bridge_4 = network_model.IP(address='101.168.1.1', type='gateway')
dns_bridge_4 = network_model.IP(address='8.8.8.8', type=None)
ips_bridge_4 = [network_model.IP(address='101.168.1.9', type=None)]
subnet_bridge_4 = network_model.Subnet(cidr='101.168.1.0/24',
dns=[dns_bridge_4],
gateway=gateway_bridge_4,
routes=None,
dhcp_server='191.168.1.1')
gateway_bridge_6 = network_model.IP(address='101:1db9::1', type='gateway')
subnet_bridge_6 = network_model.Subnet(cidr='101:1db9::/64',
dns=None,
gateway=gateway_bridge_6,
ips=None,
routes=None)
network_bridge = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface='eth0',
vlan=99, mtu=9000)
vif_bridge = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_BRIDGE,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
network_bridge_neutron = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge=None,
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface='eth0',
vlan=99)
vif_bridge_neutron = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge_neutron,
type=None,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
network_ovs = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface=None,
vlan=99, mtu=1000)
network_ivs = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface=None,
vlan=99)
vif_ovs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_hybrid = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
details={'ovs_hybrid_plug': True,
'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_filter_cap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
details={'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_legacy = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=None,
devname=None,
ovs_interfaceid=None)
vif_ivs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ivs_legacy = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=None,
devname=None,
ovs_interfaceid='aaa')
vif_ivs_filter_direct = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
details={'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ivs_filter_hybrid = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
details={
'port_filter': True,
'ovs_hybrid_plug': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_none = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=None,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
network_8021 = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge=None,
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0',
vlan=99)
vif_8021qbh = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_802_QBH,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_PROFILEID:
'MyPortProfile'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_hw_veb = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_HW_VEB,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_hostdev_physical = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_HOSTDEV,
vnic_type=
network_model.VNIC_TYPE_DIRECT_PHYSICAL,
ovs_interfaceid=None,
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_hw_veb_macvtap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_HW_VEB,
vnic_type=network_model.VNIC_TYPE_MACVTAP,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_8021qbg = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_802_QBG,
ovs_interfaceid=None,
qbg_params=network_model.VIF8021QbgParams(
managerid="xxx-yyy-zzz",
typeid="aaa-bbb-ccc",
typeidversion="1",
instanceid="ddd-eee-fff"))
network_midonet = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4],
interface='eth0')
network_vrouter = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0')
vif_vrouter = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_vrouter,
type=network_model.VIF_TYPE_VROUTER,
devname='tap-xxx-yyy-zzz')
vif_ib_hostdev = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_IB_HOSTDEV,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_midonet = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_midonet,
type=network_model.VIF_TYPE_MIDONET,
devname='tap-xxx-yyy-zzz')
vif_tap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
type=network_model.VIF_TYPE_TAP,
devname='tap-xxx-yyy-zzz')
vif_iovisor = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_IOVISOR,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
vif_vhostuser = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/vif-xxx-yyy-zzz'}
)
vif_vhostuser_fp = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
devname='tap-xxx-yyy-zzz',
details = {network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/usv-xxx-yyy-zzz',
network_model.VIF_DETAILS_VHOSTUSER_FP_PLUG: True},
)
vif_vhostuser_ovs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/usv-xxx-yyy-zzz',
network_model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True},
ovs_interfaceid='aaa-bbb-ccc', mtu=1500
)
vif_vhostuser_ovs_fp = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'server',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/usv-xxx-yyy-zzz',
network_model.VIF_DETAILS_VHOSTUSER_FP_PLUG: True,
network_model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc'
)
vif_vhostuser_ovs_fp_hybrid = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {'ovs_hybrid_plug': True,
network_model.VIF_DETAILS_VHOSTUSER_MODE: 'server',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/usv-xxx-yyy-zzz',
network_model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True,
network_model.VIF_DETAILS_VHOSTUSER_FP_PLUG: True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc'
)
vif_vhostuser_no_path = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client'})
vif_macvtap_vlan = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP,
details={network_model.VIF_DETAILS_VLAN: '1',
network_model.VIF_DETAILS_PHYS_INTERFACE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_SOURCE: 'eth0.1',
network_model.VIF_DETAILS_MACVTAP_MODE: 'vepa'})
vif_macvtap_flat = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP,
details={network_model.VIF_DETAILS_PHYS_INTERFACE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_SOURCE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_MODE: 'bridge'})
vif_macvtap_exception = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP)
instance = objects.Instance(id=1,
uuid='f0000000-0000-0000-0000-000000000001',
project_id=723)
bandwidth = {
'quota:vif_inbound_peak': '200',
'quota:vif_outbound_peak': '20',
'quota:vif_inbound_average': '100',
'quota:vif_outbound_average': '10',
'quota:vif_inbound_burst': '300',
'quota:vif_outbound_burst': '30'
}
os_vif_network = osv_objects.network.Network(
id="b82c1929-051e-481d-8110-4669916c7915",
label="Demo Net",
subnets=osv_objects.subnet.SubnetList(
objects=[]))
os_vif_bridge = osv_objects.vif.VIFBridge(
id="dc065497-3c8d-4f44-8fb4-e1d33c16a536",
address="22:52:25:62:e2:aa",
plugin="linux_bridge",
vif_name="nicdc065497-3c",
bridge_name="br100",
has_traffic_filtering=False,
network=os_vif_network)
os_vif_ovs_prof = osv_objects.vif.VIFPortProfileOpenVSwitch(
interface_id="07bd6cea-fb37-4594-b769-90fc51854ee9",
profile_id="fishfood")
os_vif_ovs = osv_objects.vif.VIFOpenVSwitch(
id="dc065497-3c8d-4f44-8fb4-e1d33c16a536",
address="22:52:25:62:e2:aa",
unplugin="linux_bridge",
vif_name="nicdc065497-3c",
bridge_name="br0",
port_profile=os_vif_ovs_prof,
network=os_vif_network)
os_vif_ovs_hybrid = osv_objects.vif.VIFBridge(
id="dc065497-3c8d-4f44-8fb4-e1d33c16a536",
address="22:52:25:62:e2:aa",
unplugin="linux_bridge",
vif_name="nicdc065497-3c",
bridge_name="br0",
port_profile=os_vif_ovs_prof,
has_traffic_filtering=False,
network=os_vif_network)
os_vif_inst_info = osv_objects.instance_info.InstanceInfo(
uuid="d5b1090c-9e00-4fa4-9504-4b1494857970",
name="instance-000004da",
project_id="2f37d7f6-e51a-4a1f-8b6e-b0917ffc8390")
def setUp(self):
super(LibvirtVifTestCase, self).setUp()
self.flags(allow_same_net_traffic=True)
self.executes = []
def fake_execute(*cmd, **kwargs):
self.executes.append(cmd)
return None, None
self.stub_out('nova.utils.execute', fake_execute)
def _get_node(self, xml):
doc = etree.fromstring(xml)
ret = doc.findall('./devices/interface')
self.assertEqual(len(ret), 1)
return ret[0]
def _assertMacEquals(self, node, vif):
mac = node.find("mac").get("address")
self.assertEqual(mac, vif['address'])
def _assertTypeEquals(self, node, type, attr, source, br_want,
prefix=None):
self.assertEqual(node.get("type"), type)
br_name = node.find(attr).get(source)
if prefix is None:
self.assertEqual(br_name, br_want)
else:
self.assertTrue(br_name.startswith(prefix))
def _assertTypeAndMacEquals(self, node, type, attr, source, vif,
br_want=None, size=0, prefix=None):
ret = node.findall("filterref")
self.assertEqual(len(ret), size)
self._assertTypeEquals(node, type, attr, source, br_want,
prefix)
self._assertMacEquals(node, vif)
def _assertModel(self, xml, model_want=None, driver_want=None):
node = self._get_node(xml)
if model_want is None:
ret = node.findall("model")
self.assertEqual(len(ret), 0)
else:
model = node.find("model").get("type")
self.assertEqual(model, model_want)
if driver_want is None:
ret = node.findall("driver")
self.assertEqual(len(ret), 0)
else:
driver = node.find("driver").get("name")
self.assertEqual(driver, driver_want)
def _assertTypeAndPciEquals(self, node, type, vif):
self.assertEqual(node.get("type"), type)
self._assertPciEqual(node, vif, type="pci")
def _assertPciEqual(self, node, vif, type=None):
address = node.find("source").find("address")
if type:
addr_type = address.get("type")
self.assertEqual(type, addr_type)
pci_slot = "%(domain)s:%(bus)s:%(slot)s.%(func)s" % {
'domain': address.get("domain")[2:],
'bus': address.get("bus")[2:],
'slot': address.get("slot")[2:],
'func': address.get("function")[2:]}
pci_slot_want = vif['profile']['pci_slot']
self.assertEqual(pci_slot, pci_slot_want)
def _assertXmlEqual(self, expectedXmlstr, actualXmlstr):
self.assertThat(actualXmlstr, matchers.XMLMatches(expectedXmlstr))
def _get_conf(self):
conf = vconfig.LibvirtConfigGuest()
conf.virt_type = "qemu"
conf.name = "fake-name"
conf.uuid = "fake-uuid"
conf.memory = 100 * 1024
conf.vcpus = 4
return conf
def _get_instance_xml(self, driver, vif, image_meta=None, flavor=None):
if flavor is None:
flavor = objects.Flavor(name='m1.small',
memory_mb=128,
vcpus=1,
root_gb=0,
ephemeral_gb=0,
swap=0,
extra_specs=dict(self.bandwidth),
deleted_at=None,
deleted=0,
created_at=None, flavorid=1,
is_public=True, vcpu_weight=None,
id=2, disabled=False, rxtx_factor=1.0)
conf = self._get_conf()
hostimpl = host.Host("qemu:///system")
nic = driver.get_config(self.instance, vif, image_meta,
flavor, CONF.libvirt.virt_type,
hostimpl)
conf.add_device(nic)
return conf.to_xml()
def _test_virtio_multiqueue(self, vcpus, want_queues):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
flavor = objects.Flavor(name='m1.small',
memory_mb=128,
vcpus=vcpus,
root_gb=0,
ephemeral_gb=0,
swap=0,
deleted_at=None,
deleted=0,
created_at=None, flavorid=1,
is_public=True, vcpu_weight=None,
id=2, disabled=False, rxtx_factor=1.0)
d = vif.LibvirtGenericVIFDriver()
image_meta = objects.ImageMeta.from_dict(
{'properties': {'hw_vif_model': 'virtio',
'hw_vif_multiqueue_enabled': 'true'}})
xml = self._get_instance_xml(d, self.vif_bridge,
image_meta, flavor)
node = self._get_node(xml)
driver = node.find("driver").get("name")
self.assertEqual(driver, 'vhost')
queues = node.find("driver").get("queues")
self.assertEqual(queues, want_queues)
def test_virtio_multiqueue(self):
self._test_virtio_multiqueue(4, '4')
@mock.patch('os.uname', return_value=('Linux', '', '2.6.32-21-generic'))
def test_virtio_multiqueue_in_kernel_2(self, mock_uname):
self._test_virtio_multiqueue(10, '1')
@mock.patch('os.uname', return_value=('Linux', '', '3.19.0-47-generic'))
def test_virtio_multiqueue_in_kernel_3(self, mock_uname):
self._test_virtio_multiqueue(10, '8')
@mock.patch('os.uname', return_value=('Linux', '', '4.2.0-35-generic'))
def test_virtio_multiqueue_in_kernel_4(self, mock_uname):
self._test_virtio_multiqueue(10, '10')
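    # Taken together, the three kernel checks above pin down the cap on the
    # virtio queue count: 2.6.x kernels get 1 queue, 3.x kernels at most 8,
    # and 4.x kernels can match the vCPU count.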
def test_multiple_nics(self):
conf = self._get_conf()
# Tests multiple nic configuration and that target_dev is
# set for each
nics = [{'net_type': 'bridge',
'mac_addr': '00:00:00:00:00:0b',
'source_dev': 'b_source_dev',
'target_dev': 'b_target_dev'},
{'net_type': 'ethernet',
'mac_addr': '00:00:00:00:00:0e',
'source_dev': 'e_source_dev',
'target_dev': 'e_target_dev'},
{'net_type': 'direct',
'mac_addr': '00:00:00:00:00:0d',
'source_dev': 'd_source_dev',
'target_dev': 'd_target_dev'}]
for nic in nics:
nic_conf = vconfig.LibvirtConfigGuestInterface()
nic_conf.net_type = nic['net_type']
nic_conf.target_dev = nic['target_dev']
nic_conf.mac_addr = nic['mac_addr']
nic_conf.source_dev = nic['source_dev']
conf.add_device(nic_conf)
xml = conf.to_xml()
doc = etree.fromstring(xml)
for nic in nics:
path = "./devices/interface/[@type='%s']" % nic['net_type']
node = doc.find(path)
self.assertEqual(nic['net_type'], node.get("type"))
self.assertEqual(nic['mac_addr'],
node.find("mac").get("address"))
self.assertEqual(nic['target_dev'],
node.find("target").get("dev"))
def test_model_novirtio(self):
self.flags(use_virtio_for_bridges=False,
virt_type='kvm',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml)
def test_model_kvm(self):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_model_kvm_qemu_custom(self):
for virt in ('kvm', 'qemu'):
self.flags(use_virtio_for_bridges=True,
virt_type=virt,
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
supported = (network_model.VIF_MODEL_NE2K_PCI,
network_model.VIF_MODEL_PCNET,
network_model.VIF_MODEL_RTL8139,
network_model.VIF_MODEL_E1000,
network_model.VIF_MODEL_SPAPR_VLAN)
for model in supported:
image_meta = objects.ImageMeta.from_dict(
{'properties': {'hw_vif_model': model}})
xml = self._get_instance_xml(d, self.vif_bridge,
image_meta)
self._assertModel(xml, model)
@mock.patch.object(vif.designer, 'set_vif_guest_frontend_config')
def test_model_with_osinfo(self, mock_set):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
self.useFixture(fixtures.MonkeyPatch(
'nova.virt.osinfo.libosinfo',
fakelibosinfo))
d = vif.LibvirtGenericVIFDriver()
image_meta = {'properties': {'os_name': 'fedora22'}}
image_meta = objects.ImageMeta.from_dict(image_meta)
d.get_base_config(None, 'ca:fe:de:ad:be:ef', image_meta,
None, 'kvm')
mock_set.assert_called_once_with(mock.ANY, 'ca:fe:de:ad:be:ef',
'virtio', None, None)
def _test_model_qemu(self, *vif_objs, **kw):
libvirt_version = kw.get('libvirt_version')
self.flags(use_virtio_for_bridges=True,
virt_type='qemu',
group='libvirt')
for vif_obj in vif_objs:
d = vif.LibvirtGenericVIFDriver()
if libvirt_version is not None:
d.libvirt_version = libvirt_version
xml = self._get_instance_xml(d, vif_obj)
doc = etree.fromstring(xml)
bandwidth = doc.find('./devices/interface/bandwidth')
self.assertNotEqual(bandwidth, None)
inbound = bandwidth.find('inbound')
self.assertEqual(inbound.get("average"),
self.bandwidth['quota:vif_inbound_average'])
self.assertEqual(inbound.get("peak"),
self.bandwidth['quota:vif_inbound_peak'])
self.assertEqual(inbound.get("burst"),
self.bandwidth['quota:vif_inbound_burst'])
outbound = bandwidth.find('outbound')
self.assertEqual(outbound.get("average"),
self.bandwidth['quota:vif_outbound_average'])
self.assertEqual(outbound.get("peak"),
self.bandwidth['quota:vif_outbound_peak'])
self.assertEqual(outbound.get("burst"),
self.bandwidth['quota:vif_outbound_burst'])
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO, "qemu")
def test_model_qemu_no_firewall(self):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
self._test_model_qemu(
self.vif_bridge,
self.vif_8021qbg,
self.vif_iovisor,
self.vif_ovs,
)
def test_model_qemu_iptables(self):
self.flags(firewall_driver="nova.virt.firewall.IptablesFirewallDriver")
self._test_model_qemu(
self.vif_bridge,
self.vif_ovs,
self.vif_ivs,
self.vif_8021qbg,
self.vif_iovisor
)
def test_model_xen(self):
self.flags(use_virtio_for_bridges=True,
virt_type='xen',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml)
def test_generic_driver_none(self):
d = vif.LibvirtGenericVIFDriver()
self.assertRaises(exception.NovaException,
self._get_instance_xml,
d,
self.vif_none)
def _check_bridge_driver(self, d, vif, br_want):
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_bridge, br_want, 1)
def test_generic_driver_bridge(self):
d = vif.LibvirtGenericVIFDriver()
self._check_bridge_driver(d,
self.vif_bridge,
self.vif_bridge['network']['bridge'])
def _check_ivs_ethernet_driver(self, d, vif, dev_prefix):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs, prefix=dev_prefix)
script = node.find("script").get("path")
self.assertEqual(script, "")
def test_unplug_ivs_ethernet(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(linux_net, 'delete_ivs_vif_port') as delete:
delete.side_effect = processutils.ProcessExecutionError
d.unplug(self.instance, self.vif_ivs)
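    # Helper for the hw_veb tests below: verifies the `ip link` commands run
    # when (un)plugging an SR-IOV macvtap VIF - setting the VF's MAC/VLAN on
    # the PF, then bringing the VF's interface up (vlan > 0) or down (vlan == 0).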
@mock.patch.object(utils, 'execute')
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address')
@mock.patch.object(pci_utils, 'get_vf_num_by_pci_address', return_value=1)
def _test_hw_veb_op(self, op, vlan, mock_get_vf_num, mock_get_ifname,
mock_execute):
mock_get_ifname.side_effect = ['eth1', 'eth13']
exit_code = [0, 2, 254]
port_state = 'up' if vlan > 0 else 'down'
calls = {
'get_ifname':
[mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'],
pf_interface=True),
mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'])],
'get_vf_num':
[mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'])],
'execute': [mock.call('ip', 'link', 'set', 'eth1',
'vf', 1, 'mac',
self.vif_hw_veb_macvtap['address'],
'vlan', vlan,
run_as_root=True,
check_exit_code=exit_code),
mock.call('ip', 'link', 'set',
'eth13', port_state,
run_as_root=True,
check_exit_code=exit_code)]
}
op(self.instance, self.vif_hw_veb_macvtap)
mock_get_ifname.assert_has_calls(calls['get_ifname'])
mock_get_vf_num.assert_has_calls(calls['get_vf_num'])
mock_execute.assert_has_calls(calls['execute'])
def test_plug_hw_veb(self):
d = vif.LibvirtGenericVIFDriver()
self._test_hw_veb_op(
d.plug,
self.vif_hw_veb_macvtap['details'][network_model.VIF_DETAILS_VLAN])
def test_unplug_hw_veb(self):
d = vif.LibvirtGenericVIFDriver()
self._test_hw_veb_op(d.unplug, 0)
def test_plug_ivs_hybrid(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy'),
mock.call('qvovif-xxx-yyy')],
'_create_veth_pair': [mock.call('qvbvif-xxx-yyy',
'qvovif-xxx-yyy', None)],
'execute': [mock.call('brctl', 'addbr', 'qbrvif-xxx-yyy',
run_as_root=True),
mock.call('brctl', 'setfd', 'qbrvif-xxx-yyy', 0,
run_as_root=True),
mock.call('brctl', 'stp', 'qbrvif-xxx-yyy', 'off',
run_as_root=True),
mock.call('tee', ('/sys/class/net/qbrvif-xxx-yyy'
'/bridge/multicast_snooping'),
process_input='0', run_as_root=True,
check_exit_code=[0, 1]),
mock.call('tee', ('/proc/sys/net/ipv6/conf'
'/qbrvif-xxx-yyy/disable_ipv6'),
process_input='1', run_as_root=True,
check_exit_code=[0, 1]),
mock.call('ip', 'link', 'set', 'qbrvif-xxx-yyy', 'up',
run_as_root=True),
mock.call('brctl', 'addif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True)],
'create_ivs_vif_port': [mock.call('qvovif-xxx-yyy', 'aaa-bbb-ccc',
'ca:fe:de:ad:be:ef',
'f0000000-0000-0000-0000-000000000001')]
}
with test.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, '_create_veth_pair'),
mock.patch.object(linux_net, 'create_ivs_vif_port'),
mock.patch.object(os.path, 'exists', return_value=True)
) as (device_exists, execute, _create_veth_pair, create_ivs_vif_port,
path_exists):
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_ivs)
device_exists.assert_has_calls(calls['device_exists'])
_create_veth_pair.assert_has_calls(calls['_create_veth_pair'])
execute.assert_has_calls(calls['execute'])
create_ivs_vif_port.assert_has_calls(calls['create_ivs_vif_port'])
def test_unplug_ivs_hybrid(self):
calls = {
'execute': [mock.call('brctl', 'delif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True),
mock.call('ip', 'link', 'set',
'qbrvif-xxx-yyy', 'down', run_as_root=True),
mock.call('brctl', 'delbr',
'qbrvif-xxx-yyy', run_as_root=True)],
'delete_ivs_vif_port': [mock.call('qvovif-xxx-yyy')]
}
with test.nested(
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, 'delete_ivs_vif_port')
) as (execute, delete_ivs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.unplug(self.instance, self.vif_ivs)
execute.assert_has_calls(calls['execute'])
delete_ivs_vif_port.assert_has_calls(calls['delete_ivs_vif_port'])
def test_unplug_ivs_hybrid_bridge_does_not_exist(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
d.unplug(self.instance, self.vif_ivs)
def test_unplug_iovisor(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
d.unplug(self.instance, self.vif_iovisor)
@mock.patch('nova.network.linux_net.device_exists')
def test_plug_iovisor(self, device_exists):
device_exists.return_value = True
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
d.plug(self.instance, self.vif_iovisor)
execute.assert_has_calls([
mock.call('ifc_ctl', 'gateway', 'add_port',
'tap-xxx-yyy-zzz', run_as_root=True),
mock.call('ifc_ctl', 'gateway', 'ifup',
'tap-xxx-yyy-zzz',
'access_vm', self.vif_iovisor['id'],
self.vif_iovisor['address'],
'pgtag2=%s' % self.vif_iovisor['network']['id'],
'pgtag1=%s' % self.instance.project_id,
run_as_root=True)])
def test_unplug_vrouter_with_details(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
d.unplug(self.instance, self.vif_vrouter)
execute.assert_called_once_with(
'vrouter-port-control',
'--oper=delete --uuid=vif-xxx-yyy-zzz',
run_as_root=True)
def test_plug_vrouter_with_details(self):
d = vif.LibvirtGenericVIFDriver()
instance = mock.Mock()
instance.name = 'instance-name'
instance.uuid = '46a4308b-e75a-4f90-a34a-650c86ca18b2'
instance.project_id = 'b168ea26fa0c49c1a84e1566d9565fa5'
instance.display_name = 'instance1'
with mock.patch.object(utils, 'execute') as execute:
d.plug(instance, self.vif_vrouter)
execute.assert_has_calls([
mock.call('ip', 'tuntap', 'add', 'tap-xxx-yyy-zzz', 'mode',
'tap', run_as_root=True, check_exit_code=[0, 2, 254]),
mock.call('ip', 'link', 'set', 'tap-xxx-yyy-zzz', 'up',
run_as_root=True, check_exit_code=[0, 2, 254]),
mock.call('vrouter-port-control',
'--oper=add --uuid=vif-xxx-yyy-zzz '
'--instance_uuid=46a4308b-e75a-4f90-a34a-650c86ca18b2 '
'--vn_uuid=network-id-xxx-yyy-zzz '
'--vm_project_uuid=b168ea26fa0c49c1a84e1566d9565fa5 '
'--ip_address=0.0.0.0 '
'--ipv6_address=None '
'--vm_name=instance1 '
'--mac=ca:fe:de:ad:be:ef '
'--tap_name=tap-xxx-yyy-zzz '
'--port_type=NovaVMPort '
'--tx_vlan_id=-1 '
'--rx_vlan_id=-1', run_as_root=True)])
def test_ivs_ethernet_driver(self):
d = vif.LibvirtGenericVIFDriver()
self._check_ivs_ethernet_driver(d,
self.vif_ivs,
"tap")
def _check_ivs_virtualport_driver(self, d, vif, want_iface_id):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
vif, vif['devname'])
def _check_ovs_virtualport_driver(self, d, vif, want_iface_id):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
vif, "br0")
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "openvswitch")
iface_id_found = False
for p_elem in vp.findall("parameters"):
iface_id = p_elem.get("interfaceid", None)
if iface_id:
self.assertEqual(iface_id, want_iface_id)
iface_id_found = True
self.assertTrue(iface_id_found)
def test_generic_ovs_virtualport_driver(self):
d = vif.LibvirtGenericVIFDriver()
want_iface_id = self.vif_ovs['ovs_interfaceid']
self._check_ovs_virtualport_driver(d,
self.vif_ovs,
want_iface_id)
def test_generic_ivs_virtualport_driver(self):
d = vif.LibvirtGenericVIFDriver()
want_iface_id = self.vif_ivs['ovs_interfaceid']
self._check_ivs_virtualport_driver(d,
self.vif_ivs,
want_iface_id)
def test_ivs_plug_with_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
xml = self._get_instance_xml(d, self.vif_ivs)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ivs, br_want, 1)
    def test_ivs_plug_with_port_filter_hybrid_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs_filter_hybrid['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ivs_filter_hybrid)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ivs_filter_hybrid, br_want, 0)
    def test_ivs_plug_with_port_filter_direct_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_ivs_filter_direct['devname']
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ivs_filter_direct)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs_filter_direct, br_want, 0)
def test_hybrid_plug_without_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ovs_hybrid['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ovs_hybrid)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ovs_hybrid, br_want, 0)
def test_direct_plug_with_port_filter_cap_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_midonet['devname']
xml = self._get_instance_xml(d, self.vif_ovs_filter_cap)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "target", "dev",
self.vif_ovs_filter_cap, br_want)
def _check_neutron_hybrid_driver(self, d, vif, br_want):
self.flags(firewall_driver="nova.virt.firewall.IptablesFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
vif, br_want, 1)
def test_generic_hybrid_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ovs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self._check_neutron_hybrid_driver(d,
self.vif_ovs,
br_want)
def test_ivs_hybrid_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self._check_neutron_hybrid_driver(d,
self.vif_ivs,
br_want)
def test_ib_hostdev_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_ib_hostdev)
doc = etree.fromstring(xml)
node = doc.findall('./devices/hostdev')[0]
self.assertEqual(1, len(node))
self._assertPciEqual(node, self.vif_ib_hostdev)
def test_midonet_ethernet_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
br_want = self.vif_midonet['devname']
xml = self._get_instance_xml(d, self.vif_midonet)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_midonet, br_want)
def test_tap_ethernet_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_tap['devname']
xml = self._get_instance_xml(d, self.vif_tap)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_tap, br_want)
@mock.patch('nova.network.linux_net.device_exists')
def test_plug_tap(self, device_exists):
device_exists.return_value = True
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_tap)
def test_unplug_tap(self):
d = vif.LibvirtGenericVIFDriver()
d.unplug(self.instance, self.vif_tap)
def test_generic_8021qbh_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_8021qbh)
node = self._get_node(xml)
self._assertTypeAndPciEquals(node, "hostdev", self.vif_8021qbh)
self._assertMacEquals(node, self.vif_8021qbh)
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "802.1Qbh")
profile_id_found = False
for p_elem in vp.findall("parameters"):
details = self.vif_8021qbh["details"]
profile_id = p_elem.get("profileid", None)
if profile_id:
self.assertEqual(profile_id,
details[network_model.VIF_DETAILS_PROFILEID])
profile_id_found = True
self.assertTrue(profile_id_found)
def test_hw_veb_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_hw_veb)
node = self._get_node(xml)
self._assertTypeAndPciEquals(node, "hostdev", self.vif_hw_veb)
self._assertMacEquals(node, self.vif_hw_veb)
vlan = node.find("vlan").find("tag").get("id")
vlan_want = self.vif_hw_veb["details"]["vlan"]
self.assertEqual(vlan, vlan_want)
def test_hostdev_physical_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_hostdev_physical)
doc = etree.fromstring(xml)
node = doc.findall('./devices/hostdev')[0]
self.assertEqual(1, len(node))
self._assertPciEqual(node, self.vif_hostdev_physical)
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address',
return_value='eth1')
def test_hw_veb_driver_macvtap(self, mock_get_ifname):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_hw_veb_macvtap)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth1")
self._assertTypeEquals(node, "direct", "source",
"mode", "passthrough")
self._assertMacEquals(node, self.vif_hw_veb_macvtap)
vlan = node.find("vlan")
self.assertIsNone(vlan)
def test_driver_macvtap_vlan(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_macvtap_vlan)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth0.1")
self._assertTypeEquals(node, "direct", "source",
"mode", "vepa")
self._assertMacEquals(node, self.vif_macvtap_vlan)
def test_driver_macvtap_flat(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_macvtap_flat)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth0")
self._assertTypeEquals(node, "direct", "source",
"mode", "bridge")
self._assertMacEquals(node, self.vif_macvtap_flat)
def test_driver_macvtap_exception(self):
d = vif.LibvirtGenericVIFDriver()
e = self.assertRaises(exception.VifDetailsMissingMacvtapParameters,
self._get_instance_xml,
d,
self.vif_macvtap_exception)
self.assertIn('macvtap_source', six.text_type(e))
self.assertIn('macvtap_mode', six.text_type(e))
self.assertIn('physical_interface', six.text_type(e))
@mock.patch.object(linux_net.LinuxBridgeInterfaceDriver, 'ensure_vlan')
def test_macvtap_plug_vlan(self, ensure_vlan_mock):
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_macvtap_vlan)
ensure_vlan_mock.assert_called_once_with('1', 'eth0',
interface='eth0.1')
@mock.patch.object(linux_net.LinuxBridgeInterfaceDriver, 'ensure_vlan')
def test_macvtap_plug_flat(self, ensure_vlan_mock):
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_macvtap_flat)
self.assertFalse(ensure_vlan_mock.called)
def test_generic_iovisor_driver(self):
d = vif.LibvirtGenericVIFDriver()
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
br_want = self.vif_ivs['devname']
xml = self._get_instance_xml(d, self.vif_ivs)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs, br_want)
def test_generic_8021qbg_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_8021qbg)
node = self._get_node(xml)
self._assertTypeEquals(node, "direct", "source", "dev", "eth0")
self._assertMacEquals(node, self.vif_8021qbg)
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "802.1Qbg")
manager_id_found = False
type_id_found = False
typeversion_id_found = False
instance_id_found = False
for p_elem in vp.findall("parameters"):
wantparams = self.vif_8021qbg['qbg_params']
manager_id = p_elem.get("managerid", None)
type_id = p_elem.get("typeid", None)
typeversion_id = p_elem.get("typeidversion", None)
instance_id = p_elem.get("instanceid", None)
if manager_id:
self.assertEqual(manager_id,
wantparams['managerid'])
manager_id_found = True
if type_id:
self.assertEqual(type_id,
wantparams['typeid'])
type_id_found = True
if typeversion_id:
self.assertEqual(typeversion_id,
wantparams['typeidversion'])
typeversion_id_found = True
if instance_id:
self.assertEqual(instance_id,
wantparams['instanceid'])
instance_id_found = True
self.assertTrue(manager_id_found)
self.assertTrue(type_id_found)
self.assertTrue(typeversion_id_found)
self.assertTrue(instance_id_found)
def test_vhostuser_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_vhostuser)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "mode", "client")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "path", "/tmp/vif-xxx-yyy-zzz")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "type", "unix")
self._assertMacEquals(node, self.vif_vhostuser)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_vhostuser_no_queues(self):
d = vif.LibvirtGenericVIFDriver()
image_meta = objects.ImageMeta.from_dict(
{'properties': {'hw_vif_model': 'virtio',
'hw_vif_multiqueue_enabled': 'true'}})
xml = self._get_instance_xml(d, self.vif_vhostuser, image_meta)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertMacEquals(node, self.vif_vhostuser)
driver = node.find("driver")
        self.assertIsNone(driver)
def test_vhostuser_driver_no_path(self):
d = vif.LibvirtGenericVIFDriver()
self.assertRaises(exception.VifDetailsMissingVhostuserSockPath,
self._get_instance_xml,
d,
self.vif_vhostuser_no_path)
def test_vhostuser_driver_ovs(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d,
self.vif_vhostuser_ovs)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "mode", "client")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "path", "/tmp/usv-xxx-yyy-zzz")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "type", "unix")
self._assertMacEquals(node, self.vif_vhostuser_ovs)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
@mock.patch.object(linux_net, 'create_fp_dev')
def test_vhostuser_fp_plug(self, mock_create_fp_dev):
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_vhostuser_fp)
mock_create_fp_dev.assert_has_calls(
[mock.call('tap-xxx-yyy-zzz', '/tmp/usv-xxx-yyy-zzz', 'client')])
@mock.patch.object(linux_net, 'delete_fp_dev')
def test_vhostuser_fp_unplug(self, mock_delete_fp_dev):
d = vif.LibvirtGenericVIFDriver()
d.unplug(self.instance, self.vif_vhostuser_fp)
mock_delete_fp_dev.assert_has_calls([mock.call('tap-xxx-yyy-zzz')])
def test_vhostuser_ovs_plug(self):
calls = {
'create_ovs_vif_port': [
mock.call(
'br0', 'usv-xxx-yyy-zzz',
'aaa-bbb-ccc', 'ca:fe:de:ad:be:ef',
'f0000000-0000-0000-0000-000000000001', 9000,
interface_type=network_model.OVS_VHOSTUSER_INTERFACE_TYPE
)]
}
with mock.patch.object(linux_net,
'create_ovs_vif_port') as create_ovs_vif_port:
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_vhostuser_ovs)
create_ovs_vif_port.assert_has_calls(calls['create_ovs_vif_port'])
def test_vhostuser_ovs_unplug(self):
calls = {
'delete_ovs_vif_port': [mock.call('br0', 'usv-xxx-yyy-zzz')]
}
with mock.patch.object(linux_net,
'delete_ovs_vif_port') as delete_port:
d = vif.LibvirtGenericVIFDriver()
d.unplug(self.instance, self.vif_vhostuser_ovs)
delete_port.assert_has_calls(calls['delete_ovs_vif_port'])
def test_vhostuser_ovs_fp_plug(self):
calls = {
'create_fp_dev': [mock.call('tap-xxx-yyy-zzz',
'/tmp/usv-xxx-yyy-zzz',
'client')],
'create_ovs_vif_port': [mock.call(
'br0', 'tap-xxx-yyy-zzz',
'aaa-bbb-ccc', 'ca:fe:de:ad:be:ef',
'f0000000-0000-0000-0000-000000000001',
9000)]
}
with test.nested(
mock.patch.object(linux_net, 'create_fp_dev'),
mock.patch.object(linux_net, 'create_ovs_vif_port'),
) as (create_fp_dev, create_ovs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.plug_vhostuser(self.instance, self.vif_vhostuser_ovs_fp)
create_fp_dev.assert_has_calls(calls['create_fp_dev'])
create_ovs_vif_port.assert_has_calls(calls['create_ovs_vif_port'])
def test_vhostuser_ovs_fp_unplug(self):
calls = {
'delete_ovs_vif_port': [mock.call('br0', 'tap-xxx-yyy-zzz',
False)],
'delete_fp_dev': [mock.call('tap-xxx-yyy-zzz')],
}
with test.nested(
mock.patch.object(linux_net, 'delete_ovs_vif_port'),
mock.patch.object(linux_net, 'delete_fp_dev')
) as (delete_ovs_port, delete_fp_dev):
d = vif.LibvirtGenericVIFDriver()
d.unplug_vhostuser(None, self.vif_vhostuser_ovs_fp)
delete_ovs_port.assert_has_calls(calls['delete_ovs_vif_port'])
delete_fp_dev.assert_has_calls(calls['delete_fp_dev'])
def test_vhostuser_ovs_fp_hybrid_plug(self):
calls = {
'create_fp_dev': [mock.call('tap-xxx-yyy-zzz',
'/tmp/usv-xxx-yyy-zzz',
'client')],
'device_exists': [mock.call('tap-xxx-yyy-zzz'),
mock.call('qbrvif-xxx-yyy'),
mock.call('qvovif-xxx-yyy')],
'_create_veth_pair': [mock.call('qvbvif-xxx-yyy',
'qvovif-xxx-yyy', 9000)],
'execute': [mock.call('brctl', 'addbr', 'qbrvif-xxx-yyy',
run_as_root=True),
mock.call('brctl', 'setfd', 'qbrvif-xxx-yyy', 0,
run_as_root=True),
mock.call('brctl', 'stp', 'qbrvif-xxx-yyy', 'off',
run_as_root=True),
mock.call('tee', ('/sys/class/net/qbrvif-xxx-yyy'
'/bridge/multicast_snooping'),
process_input='0', run_as_root=True,
check_exit_code=[0, 1]),
mock.call('ip', 'link', 'set', 'qbrvif-xxx-yyy', 'up',
run_as_root=True),
mock.call('brctl', 'addif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True),
mock.call('brctl', 'addif', 'qbrvif-xxx-yyy',
'tap-xxx-yyy-zzz', run_as_root=True)],
'create_ovs_vif_port': [mock.call(
'br0', 'qvovif-xxx-yyy',
'aaa-bbb-ccc', 'ca:fe:de:ad:be:ef',
'f0000000-0000-0000-0000-000000000001',
9000)]
}
with test.nested(
mock.patch.object(linux_net, 'create_fp_dev'),
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, '_create_veth_pair'),
mock.patch.object(linux_net, 'create_ovs_vif_port')
) as (create_fp_dev, device_exists, execute, _create_veth_pair,
create_ovs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.plug_vhostuser(self.instance, self.vif_vhostuser_ovs_fp_hybrid)
create_fp_dev.assert_has_calls(calls['create_fp_dev'])
device_exists.assert_has_calls(calls['device_exists'])
_create_veth_pair.assert_has_calls(calls['_create_veth_pair'])
execute.assert_has_calls(calls['execute'])
create_ovs_vif_port.assert_has_calls(calls['create_ovs_vif_port'])
def test_vhostuser_ovs_fp_hybrid_unplug(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy')],
'execute': [mock.call('brctl', 'delif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True),
mock.call('ip', 'link', 'set',
'qbrvif-xxx-yyy', 'down', run_as_root=True),
mock.call('brctl', 'delbr',
'qbrvif-xxx-yyy', run_as_root=True)],
'delete_ovs_vif_port': [mock.call('br0', 'qvovif-xxx-yyy')],
'delete_fp_dev': [mock.call('tap-xxx-yyy-zzz')]
}
with test.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=True),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, 'delete_ovs_vif_port'),
mock.patch.object(linux_net, 'delete_fp_dev')
) as (device_exists, execute, delete_ovs_vif_port, delete_fp_dev):
d = vif.LibvirtGenericVIFDriver()
d.unplug_vhostuser(None, self.vif_vhostuser_ovs_fp_hybrid)
device_exists.assert_has_calls(calls['device_exists'])
execute.assert_has_calls(calls['execute'])
delete_ovs_vif_port.assert_has_calls(calls['delete_ovs_vif_port'])
delete_fp_dev.assert_has_calls(calls['delete_fp_dev'])
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
@mock.patch.object(os_vif, "plug")
def _test_osvif_plug(self, fail, mock_plug,
mock_convert_vif, mock_convert_inst):
mock_convert_vif.return_value = self.os_vif_bridge
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
if fail:
mock_plug.side_effect = osv_exception.ExceptionBase("Wibble")
self.assertRaises(exception.NovaException,
d.plug,
self.instance, self.vif_bridge)
else:
d.plug(self.instance, self.vif_bridge)
mock_plug.assert_called_once_with(self.os_vif_bridge,
self.os_vif_inst_info)
def test_osvif_plug_normal(self):
self._test_osvif_plug(False)
def test_osvif_plug_fail(self):
self._test_osvif_plug(True)
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
@mock.patch.object(os_vif, "unplug")
def _test_osvif_unplug(self, fail, mock_unplug,
mock_convert_vif, mock_convert_inst):
mock_convert_vif.return_value = self.os_vif_bridge
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
if fail:
mock_unplug.side_effect = osv_exception.ExceptionBase("Wibble")
self.assertRaises(exception.NovaException,
d.unplug,
self.instance, self.vif_bridge)
else:
d.unplug(self.instance, self.vif_bridge)
mock_unplug.assert_called_once_with(self.os_vif_bridge,
self.os_vif_inst_info)
def test_osvif_unplug_normal(self):
self._test_osvif_unplug(False)
def test_osvif_unplug_fail(self):
self._test_osvif_unplug(True)
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
def test_config_os_vif_bridge(self, mock_convert_vif, mock_convert_inst):
mock_convert_vif.return_value = self.os_vif_bridge
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
hostimpl = host.Host("qemu:///system")
flavor = objects.Flavor(name='m1.small')
image_meta = objects.ImageMeta.from_dict({})
cfg = d.get_config(self.instance, self.vif_bridge,
image_meta, flavor,
CONF.libvirt.virt_type,
hostimpl)
self._assertXmlEqual("""
<interface type="bridge">
<mac address="22:52:25:62:e2:aa"/>
<model type="virtio"/>
<source bridge="br100"/>
<target dev="nicdc065497-3c"/>
<filterref
filter="nova-instance-instance-00000001-22522562e2aa"/>
</interface>""", cfg.to_xml())
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
def test_config_os_vif_bridge_nofw(self, mock_convert_vif,
mock_convert_inst):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
mock_convert_vif.return_value = self.os_vif_bridge
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
hostimpl = host.Host("qemu:///system")
flavor = objects.Flavor(name='m1.small')
image_meta = objects.ImageMeta.from_dict({})
cfg = d.get_config(self.instance, self.vif_bridge,
image_meta, flavor,
CONF.libvirt.virt_type,
hostimpl)
self._assertXmlEqual("""
<interface type="bridge">
<mac address="22:52:25:62:e2:aa"/>
<model type="virtio"/>
<source bridge="br100"/>
<target dev="nicdc065497-3c"/>
</interface>""", cfg.to_xml())
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
def test_config_os_vif_ovs(self, mock_convert_vif, mock_convert_inst):
mock_convert_vif.return_value = self.os_vif_ovs
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
hostimpl = host.Host("qemu:///system")
flavor = objects.Flavor(name='m1.small')
image_meta = objects.ImageMeta.from_dict({})
cfg = d.get_config(self.instance, self.vif_ovs,
image_meta, flavor,
CONF.libvirt.virt_type,
hostimpl)
self._assertXmlEqual("""
<interface type="bridge">
<mac address="22:52:25:62:e2:aa"/>
<model type="virtio"/>
<source bridge="br0"/>
<target dev="nicdc065497-3c"/>
<virtualport type="openvswitch">
<parameters
interfaceid="07bd6cea-fb37-4594-b769-90fc51854ee9"/>
</virtualport>
</interface>""", cfg.to_xml())
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
def test_config_os_vif_ovs_hybrid(self, mock_convert_vif,
mock_convert_inst):
mock_convert_vif.return_value = self.os_vif_ovs_hybrid
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
hostimpl = host.Host("qemu:///system")
flavor = objects.Flavor(name='m1.small')
image_meta = objects.ImageMeta.from_dict({})
cfg = d.get_config(self.instance, self.vif_ovs,
image_meta, flavor,
CONF.libvirt.virt_type,
hostimpl)
self._assertXmlEqual("""
<interface type="bridge">
<mac address="22:52:25:62:e2:aa"/>
<model type="virtio"/>
<source bridge="br0"/>
<target dev="nicdc065497-3c"/>
<filterref
filter="nova-instance-instance-00000001-22522562e2aa"/>
</interface>""", cfg.to_xml())
@mock.patch('nova.network.linux_net._set_device_mtu')
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
@mock.patch.object(os_vif, "plug")
def test_plug_ovs_vif_no_mtu(self, mock_plug,
mock_convert_vif, mock_convert_inst,
mock_set_mtu):
mock_convert_vif.return_value = self.os_vif_bridge
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
# Hack the network mtu in the vif_bridge object - make sure to copy it
# so we don't change state on a global object during a test run.
vif_bridge = copy.deepcopy(self.vif_bridge)
vif_bridge['network']._set_meta({'mtu': None})
d.plug(self.instance, vif_bridge)
self.assertFalse(mock_set_mtu.called)
@mock.patch('nova.network.linux_net._set_device_mtu')
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
@mock.patch.object(os_vif, "plug")
def test_plug_ovs_vif_mtu(self, mock_plug,
mock_convert_vif, mock_convert_inst,
mock_set_mtu):
# Hack port profile to say ovs, just like ovn
os_vif_bridge = copy.deepcopy(self.os_vif_bridge)
os_vif_bridge.port_profile = self.os_vif_ovs_prof
mock_convert_vif.return_value = os_vif_bridge
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_bridge)
self.assertEqual(3, mock_set_mtu.call_count)
mock_set_mtu.assert_any_call("br0", 9000)
mock_set_mtu.assert_any_call("qvbdc065497-3c", 9000)
mock_set_mtu.assert_any_call("qvodc065497-3c", 9000)
@mock.patch('nova.network.linux_net._set_device_mtu')
@mock.patch("nova.network.os_vif_util.nova_to_osvif_instance")
@mock.patch("nova.network.os_vif_util.nova_to_osvif_vif")
@mock.patch.object(os_vif, "plug")
def test_plug_ovs_vif_no_mtu_venv(self, mock_plug,
mock_convert_vif, mock_convert_inst,
mock_set_mtu):
mock_convert_vif.return_value = self.os_vif_ovs
mock_convert_inst.return_value = self.os_vif_inst_info
d = vif.LibvirtGenericVIFDriver()
d.plug(self.instance, self.vif_ovs)
self.assertEqual(1, mock_set_mtu.call_count)
mock_set_mtu.assert_any_call("br0", 1000)
| [
"[email protected]"
] | |
9727f8d73ce82c5146d695bcfb555813b543ff79 | fa82dad9e83206d4630a55141bf44f50cbf0c3a8 | /day1_python/01_python200_src/022.py | 164a540eabd104f453b2caa203a01db62f7e7434 | [] | no_license | jsh2333/pyml | 8f8c53a43af23b8490b25f35f28d85f1087df28d | 157dfa7cc2f1458f12e451691a994ac6ef138cab | refs/heads/master | 2021-03-27T22:26:38.254206 | 2020-04-26T06:35:11 | 2020-04-26T06:35:11 | 249,114,580 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | a = True
b = False
print(a == 1) # prints True
print(b != 0) # prints False
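# Note: bool is a subclass of int in Python, so True == 1 and False == 0.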
| [
"[email protected]"
] | |
ada46b035a399992d1b92db668b619ceb6f7a703 | 35e00d1996515ccf3151067ff28ff3357078f0b6 | /google/pubsub_v1/services/subscriber/transports/base.py | d50b8baf683cbde3f253dee4f766efed2d53690a | [
"Apache-2.0"
] | permissive | googleapis/python-pubsub | 5bb18674307bd89236a61c0d7c5079f10e19467e | 1b9724324c58d27bcee42020b751cda58d80fddb | refs/heads/main | 2023-09-03T13:14:22.894233 | 2023-08-28T13:18:36 | 2023-08-28T13:18:36 | 226,992,581 | 321 | 195 | Apache-2.0 | 2023-09-10T23:29:10 | 2019-12-10T00:09:52 | Python | UTF-8 | Python | false | false | 19,653 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
from google.pubsub_v1 import gapic_version as package_version
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from google.pubsub_v1.types import pubsub
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
client_library_version=package_version.__version__
)
class SubscriberTransport(abc.ABC):
"""Abstract transport class for Subscriber."""
AUTH_SCOPES = (
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/pubsub",
)
DEFAULT_HOST: str = "pubsub.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# Don't apply audience if the credentials file passed from user.
if hasattr(credentials, "with_gdch_audience"):
credentials = credentials.with_gdch_audience(
api_audience if api_audience else host
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.create_subscription: gapic_v1.method.wrap_method(
self.create_subscription,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.ServiceUnavailable,
core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.get_subscription: gapic_v1.method.wrap_method(
self.get_subscription,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.ServiceUnavailable,
core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.update_subscription: gapic_v1.method.wrap_method(
self.update_subscription,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.list_subscriptions: gapic_v1.method.wrap_method(
self.list_subscriptions,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.ServiceUnavailable,
core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.delete_subscription: gapic_v1.method.wrap_method(
self.delete_subscription,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.modify_ack_deadline: gapic_v1.method.wrap_method(
self.modify_ack_deadline,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.acknowledge: gapic_v1.method.wrap_method(
self.acknowledge,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.pull: gapic_v1.method.wrap_method(
self.pull,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.InternalServerError,
core_exceptions.ServiceUnavailable,
core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.streaming_pull: gapic_v1.method.wrap_method(
self.streaming_pull,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=4.0,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.DeadlineExceeded,
core_exceptions.InternalServerError,
core_exceptions.ResourceExhausted,
core_exceptions.ServiceUnavailable,
),
deadline=900.0,
),
default_timeout=900.0,
client_info=client_info,
),
self.modify_push_config: gapic_v1.method.wrap_method(
self.modify_push_config,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.get_snapshot: gapic_v1.method.wrap_method(
self.get_snapshot,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.ServiceUnavailable,
core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.list_snapshots: gapic_v1.method.wrap_method(
self.list_snapshots,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.ServiceUnavailable,
core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.create_snapshot: gapic_v1.method.wrap_method(
self.create_snapshot,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.update_snapshot: gapic_v1.method.wrap_method(
self.update_snapshot,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.delete_snapshot: gapic_v1.method.wrap_method(
self.delete_snapshot,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
self.seek: gapic_v1.method.wrap_method(
self.seek,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.Aborted,
core_exceptions.ServiceUnavailable,
core_exceptions.Unknown,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
),
}
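    # Note: every RPC above is wrapped with a gapic retry/timeout policy,
    # e.g. pull() retries on Aborted/InternalServerError/ServiceUnavailable/
    # Unknown with exponential backoff (0.1s initial, x1.3 multiplier, 60s
    # cap) under a 60s deadline, while the long-lived streaming_pull() uses
    # a 900s deadline instead.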
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def create_subscription(
self,
) -> Callable[
[pubsub.Subscription],
Union[pubsub.Subscription, Awaitable[pubsub.Subscription]],
]:
raise NotImplementedError()
@property
def get_subscription(
self,
) -> Callable[
[pubsub.GetSubscriptionRequest],
Union[pubsub.Subscription, Awaitable[pubsub.Subscription]],
]:
raise NotImplementedError()
@property
def update_subscription(
self,
) -> Callable[
[pubsub.UpdateSubscriptionRequest],
Union[pubsub.Subscription, Awaitable[pubsub.Subscription]],
]:
raise NotImplementedError()
@property
def list_subscriptions(
self,
) -> Callable[
[pubsub.ListSubscriptionsRequest],
Union[
pubsub.ListSubscriptionsResponse,
Awaitable[pubsub.ListSubscriptionsResponse],
],
]:
raise NotImplementedError()
@property
def delete_subscription(
self,
) -> Callable[
[pubsub.DeleteSubscriptionRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def modify_ack_deadline(
self,
) -> Callable[
[pubsub.ModifyAckDeadlineRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def acknowledge(
self,
) -> Callable[
[pubsub.AcknowledgeRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]
]:
raise NotImplementedError()
@property
def pull(
self,
) -> Callable[
[pubsub.PullRequest], Union[pubsub.PullResponse, Awaitable[pubsub.PullResponse]]
]:
raise NotImplementedError()
@property
def streaming_pull(
self,
) -> Callable[
[pubsub.StreamingPullRequest],
Union[pubsub.StreamingPullResponse, Awaitable[pubsub.StreamingPullResponse]],
]:
raise NotImplementedError()
@property
def modify_push_config(
self,
) -> Callable[
[pubsub.ModifyPushConfigRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def get_snapshot(
self,
) -> Callable[
[pubsub.GetSnapshotRequest], Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]]
]:
raise NotImplementedError()
@property
def list_snapshots(
self,
) -> Callable[
[pubsub.ListSnapshotsRequest],
Union[pubsub.ListSnapshotsResponse, Awaitable[pubsub.ListSnapshotsResponse]],
]:
raise NotImplementedError()
@property
def create_snapshot(
self,
) -> Callable[
[pubsub.CreateSnapshotRequest],
Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]],
]:
raise NotImplementedError()
@property
def update_snapshot(
self,
) -> Callable[
[pubsub.UpdateSnapshotRequest],
Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]],
]:
raise NotImplementedError()
@property
def delete_snapshot(
self,
) -> Callable[
[pubsub.DeleteSnapshotRequest],
Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
]:
raise NotImplementedError()
@property
def seek(
self,
) -> Callable[
[pubsub.SeekRequest], Union[pubsub.SeekResponse, Awaitable[pubsub.SeekResponse]]
]:
raise NotImplementedError()
@property
def set_iam_policy(
self,
) -> Callable[
[iam_policy_pb2.SetIamPolicyRequest],
Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def get_iam_policy(
self,
) -> Callable[
[iam_policy_pb2.GetIamPolicyRequest],
Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
]:
raise NotImplementedError()
@property
def test_iam_permissions(
self,
) -> Callable[
[iam_policy_pb2.TestIamPermissionsRequest],
Union[
iam_policy_pb2.TestIamPermissionsResponse,
Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
],
]:
raise NotImplementedError()
@property
def kind(self) -> str:
raise NotImplementedError()
__all__ = ("SubscriberTransport",)
| [
"[email protected]"
] | |
de94c56c7667c7a7c99683be9a0bf04d00ecca56 | a5688a923c488414ecffcb92e3405d3876f1889d | /examples/computer_vision/mmdetection_pytorch/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py | 28f4fd4b13801f9cb47ceb3d715244d40c281c70 | [
"Apache-2.0"
] | permissive | armandmcqueen/determined | ae6e7a4d5d8c3fb6a404ed35519643cf33bd08e4 | 251e7093b60a92633b684586ac7a566379442f15 | refs/heads/master | 2023-05-28T17:52:18.915710 | 2021-06-09T23:55:59 | 2021-06-09T23:55:59 | 259,449,481 | 0 | 0 | Apache-2.0 | 2021-04-09T12:13:11 | 2020-04-27T20:47:23 | Go | UTF-8 | Python | false | false | 376 | py | _base_ = "./ga_faster_r50_fpn_1x_coco.py"
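# Inherits the GA Faster R-CNN R-50 FPN 1x baseline config and swaps the
# backbone for ResNeXt-101 64x4d initialized from open-mmlab pretrained weights.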
model = dict(
pretrained="open-mmlab://resnext101_64x4d",
backbone=dict(
type="ResNeXt",
depth=101,
groups=64,
base_width=4,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type="BN", requires_grad=True),
style="pytorch",
),
)
| [
"[email protected]"
] | |
3af8710d6b01d30dfe73cc770a65abe5f3cdfb70 | 99da8a6d2392472cb66e5b12c03142c90640186a | /BOJ/DFS&BFS/2573.py | 784c81d48ea577cc9b507ab56b9611ea6bc47a60 | [] | no_license | chorwonkim/__Algorithms__ | cf6cf4ae5cf091d856397369b6db1bb41f925377 | 0c1e58410ae90b72c0d7e44a6179b8fedc786131 | refs/heads/master | 2022-09-28T16:59:20.841482 | 2022-09-25T09:57:58 | 2022-09-25T09:57:58 | 130,082,803 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,723 | py | import sys
sys.setrecursionlimit(100000)
Read = sys.stdin.readline
N, M = map(int, Read().split())
ice_map = [list(map(int, Read().split())) for _ in range(N)]
ice_map_checker = [[False for _ in range(M)] for _ in range(N)]
result = 0
def func_2573(p, q):
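    """Flood-fill (DFS): mark every ice cell reachable from (p, q) as visited."""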
ice_map_checker[p][q] = True
for i, j in zip([-1,0,1,0], [0,-1,0,1]):
ice_p = p + i
ice_q = q + j
if (not ice_map_checker[ice_p][ice_q]) and ice_map[ice_p][ice_q]:
func_2573(ice_p, ice_q)
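# One loop iteration == one year: count the connected ice chunks with DFS;
# stop when the iceberg has split into >= 2 chunks, or print 0 if it melted
# away completely. Otherwise melt each ice cell by its number of adjacent
# open-sea cells and try again.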
while True:
block = 0
    ice_map_checker = [[False for _ in range(M)] for _ in range(N)]
for i in range(1, N-1):
for j in range(1, M-1):
if ice_map[i][j] and (not ice_map_checker[i][j]):
block += 1
func_2573(i, j)
if block >= 2:
print(result)
break
elif block == 0:
print(0)
break
result += 1
for i in range(N):
for j in range(M):
if (not ice_map_checker[i][j]) and (not ice_map[i][j]):
for p, q in zip([-1,0,1,0], [0,-1,0,1]):
ice_x = i + p
ice_y = j + q
if 0 <= ice_x < N and 0 <= ice_y < M and ice_map[ice_x][ice_y]:
ice_map[ice_x][ice_y] -= 1
| [
"[email protected]"
] | |
6fe6df1f0c3cf8fbc8b71b768fcb421f521466e2 | 2d4127f5fa1bca8ba41b9da48d9180c64680b327 | /openid_connect_op/utils/jwt.py | 7c7f306b1959045ffe75b8aaff0c8c8189a54732 | [
"MIT"
] | permissive | WilliBobadilla/django-openid-op | ce02b7fe9db4fa8aca0cec4df003c905927dfbe3 | 732812cab7610080289ae70b8ea791ba9f0105ad | refs/heads/master | 2023-04-19T08:49:19.962249 | 2021-05-03T18:21:26 | 2021-05-03T18:21:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,642 | py | import datetime
import python_jwt as jwt
import jwcrypto.jwk as jwk
from django.conf import settings
from os import urandom
from jwcrypto.jws import JWS
from jwcrypto.common import base64url_encode, json_encode, json_decode
from calendar import timegm
# We need to add a "kid" header, which the original python_jwt cannot do.
from openid_connect_op.models import OpenIDClient
def generate_jwt_patched(claims, priv_key=None,
algorithm='PS512', lifetime=None, expires=None,
not_before=None,
jti_size=16, extra_headers={}):
"""
Generate a JSON Web Token.
:param claims: The claims you want included in the signature.
:type claims: dict
:param priv_key: The private key to be used to sign the token. Note: if you pass ``None`` then the token will be returned with an empty cryptographic signature and :obj:`algorithm` will be forced to the value ``none``.
:type priv_key: `jwcrypto.jwk.JWK <https://jwcrypto.readthedocs.io/en/latest/jwk.html>`_
:param algorithm: The algorithm to use for generating the signature. ``RS256``, ``RS384``, ``RS512``, ``PS256``, ``PS384``, ``PS512``, ``ES256``, ``ES384``, ``ES512``, ``HS256``, ``HS384``, ``HS512`` and ``none`` are supported.
:type algorithm: str
:param lifetime: How long the token is valid for.
:type lifetime: datetime.timedelta
:param expires: When the token expires (if :obj:`lifetime` isn't specified)
:type expires: datetime.datetime
:param not_before: When the token is valid from. Defaults to current time (if ``None`` is passed).
:type not_before: datetime.datetime
:param jti_size: Size in bytes of the unique token ID to put into the token (can be used to detect replay attacks). Defaults to 16 (128 bits). Specify 0 or ``None`` to omit the JTI from the token.
:type jti_size: int
:rtype: unicode
:returns: The JSON Web Token. Note this includes a header, the claims and a cryptographic signature. The following extra claims are added, per the `JWT spec <http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html>`_:
- **exp** (*IntDate*) -- The UTC expiry date and time of the token, in number of seconds from 1970-01-01T0:0:0Z UTC.
- **iat** (*IntDate*) -- The UTC date and time at which the token was generated.
- **nbf** (*IntDate*) -- The UTC valid-from date and time of the token.
- **jti** (*str*) -- A unique identifier for the token.
"""
header = {
'typ': 'JWT',
'alg': algorithm if priv_key else 'none'
}
header.update(extra_headers)
claims = dict(claims)
now = datetime.datetime.utcnow()
if jti_size:
claims['jti'] = base64url_encode(urandom(jti_size))
claims['nbf'] = timegm((not_before or now).utctimetuple())
claims['iat'] = timegm(now.utctimetuple())
if lifetime:
claims['exp'] = timegm((now + lifetime).utctimetuple())
elif expires:
claims['exp'] = timegm(expires.utctimetuple())
if header['alg'] == 'none':
signature = ''
else:
token = JWS(json_encode(claims))
token.add_signature(priv_key, protected=header)
signature = json_decode(token.serialize())['signature']
return u'%s.%s.%s' % (
base64url_encode(json_encode(header)),
base64url_encode(json_encode(claims)),
signature
)
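# Usage sketch (illustrative key and claims; requires jwcrypto):
#   key = jwk.JWK.generate(kty='RSA', size=2048, kid='sig-key-1', alg='RS256')
#   token = generate_jwt_patched({'sub': 'alice'}, key, 'RS256',
#                                lifetime=datetime.timedelta(minutes=5),
#                                extra_headers={'kid': key.key_id})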
class JWTTools:
@staticmethod
def generate_jwt(payload, for_client=None, ttl=None, from_client=None):
if for_client is None:
sign_alg = 'RS256'
elif for_client.client_auth_type == OpenIDClient.CLIENT_AUTH_TYPE_SECRET_JWT:
sign_alg = 'HS256'
else:
sign_alg = for_client.client_registration_data.get('id_token_signed_response_alg', 'RS256')
return JWTTools.generate_jwt_with_sign_alg(payload, sign_alg, ttl=ttl, client=from_client)
@staticmethod
def generate_jwt_with_sign_alg(payload, sign_alg, ttl=None, client=None):
from openid_connect_op.models import OpenIDClient
if not client:
client = OpenIDClient.self_instance()
if client.client_auth_type == client.CLIENT_AUTH_TYPE_SECRET_JWT:
sign_key = jwk.JWK(kty="oct", use="sig", alg="HS256", k=base64url_encode(client.client_hashed_secret))
extra_headers = {}
alg = 'HS256'
else:
sign_key = client.get_key(sign_alg)
extra_headers = {
'kid': sign_key.key_id
}
alg = sign_key._params['alg']
return generate_jwt_patched(payload,
sign_key,
alg,
extra_headers=extra_headers,
lifetime=ttl)
@staticmethod
def validate_jwt(token, client=None):
from openid_connect_op.models import OpenIDClient
if client is None:
client = OpenIDClient.self_instance()
if client.client_auth_type == OpenIDClient.CLIENT_AUTH_TYPE_SECRET_JWT:
key = jwk.JWK(kty="oct", use="sig", alg="HS256", k=base64url_encode(client.client_hashed_secret))
return jwt.verify_jwt(token, key, ['HS256'], checks_optional=True)
header, __ = jwt.process_jwt(token)
key = client.get_key(alg=header.get('alg', 'RS256'), kid=header.get('kid', None))
return jwt.verify_jwt(token, key, [key._params.get('alg', 'RS256')], checks_optional=True)
@staticmethod
def unverified_jwt_payload(token):
return jwt.process_jwt(token)[1] | [
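# Round-trip sketch (assumes a configured OpenIDClient row; names illustrative):
#   token = JWTTools.generate_jwt({'sub': 'alice'}, for_client=client,
#                                 ttl=datetime.timedelta(minutes=5))
#   claims = JWTTools.validate_jwt(token)  # verifies against the OP's own keys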
"[email protected]"
] | |
80147bbb0710f00faafe35228d6fa40ec6be58de | 57d88ae2ee752f9624c10d81bb46fc979176f86d | /gisty.py | 966290439f230e79fb9a5d17832126519bda27c3 | [] | no_license | lambdamusic/Snipplr_2_Gist | e346cba5762ffb50cef63aa93430e229016b07f4 | bf23e99595cd43abfd301649c8727a894c80c2f6 | refs/heads/master | 2021-01-10T21:37:28.916160 | 2013-02-07T21:57:11 | 2013-02-07T21:57:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,673 | py | # mikele: I modified this so that I can pass stuff directly from console
# In [1]: from gisty import *
# In [2]: gist_list("username", "password")
# https://gist.github.com/3808121 - [u'new_gist.py'] 'Python: take input from stdin and create a new public gist with it'
# https://gist.github.com/3807911 - [u'base.html'] 'Html: base'
# https://gist.github.com/3096854 - [u'Tractatus-rgraph.js'] 'RGraph for Tractatus visualization'
# https://gist.github.com/9c0263e2e5e07e6eddb0 - [u'Scheme load rss into Impromptu 2 ', u'Scheme: load rss into Impromptu 2 ', u'Scheme.scm'] 'Scheme: load rss into Impromptu'
# In [3]: gist_post_console("test.py", "define (x)", True, "USER", "PSW")
# Posting gist as USER
# Posted to https://gist.github.com/3808191
# Git pull: git://gist.github.com/3808191.git
# Git push: [email protected]:3808191.git
#!/usr/bin/env python
import subprocess
import platform
import optparse
import urllib2
import getpass
import sys
import os
try:
import simplejson as json
except ImportError:
import json
def getcontent(fname):
""" read content from file
:returns: file contents"""
with open(fname) as f:
return f.read()
def bauth(user, password):
"""perform basic auth
:param user: the username
:param password: the password
:returns: string in the format: "Basic THEHASH"
"""
s = user + ":" + password
return "Basic " + s.encode("base64").rstrip()
def parse_list(response):
"""Parse a github gist listing .
:param response: the returned body."""
try:
gists = json.loads(response.replace('\n', '\\n'))
if type(gists) is list:
for entry in gists:
print "%s - %s '%s'" % (entry['html_url'],
entry['files'].keys(), entry['description'])
else:
print "%s - %s '%s'" % (gists['html_url'],
gists['files'].keys(), gists['description'])
except Exception as err:
print "Error parsing json: %s" % err
print "=" * 79
print repr(response)
print "=" * 79
return None
def parse_post(response):
"""Parse a github gist posting.
:param response: the returned body."""
try:
gist = json.loads(response.replace('\n', '\\n'))
print "Posted to %s" % gist['html_url']
if platform.system() == 'Darwin':
os.system('echo "%s" | pbcopy' % gist['html_url'])
print "Git pull: %s" % gist['git_pull_url']
print "Git push: %s" % gist['git_push_url']
except Exception as err:
print "Error parsing json: %s" % err
print "=" * 79
print repr(response)
print "=" * 79
def gist_list(user, password=None, gid=None):
"""Peform a gist listing
:param user: github user name for auth.
:param password: github user password.
:param gid: gist id to retrieve or None for all"""
if gid is None:
if user is not None:
url = "https://api.github.com/users/%s/gists" % user
else:
print "Couldn't find your github username."
sys.exit(1)
else:
url = "https://api.github.com/gists/%s" % gid
if user is not None and password is not None:
req = urllib2.Request(url, headers = {
'Authorization': bauth(user, password),
'Accept': '*/*',
'User-Agent': 'gistipy/1'})
else:
req = urllib2.Request(url, headers = {
'Accept': '*/*',
'User-Agent': 'gistipy/1'})
try:
f = urllib2.urlopen(req)
response = f.read()
f.close()
parse_list(response)
except Exception as err:
print "Error getting gist(s): %s" % err
def gist_post(fname, public=True, user=None, password=None):
"""
    Post an anonymous, public or private gist to github.
{
"description": "the description for this gist",
"public": true,
"files": {
"file1.txt": {
"content": "String file contents"
}
}
}
:param fname: The gist filename.
:param public: Whether this gist is public (True) or private (False)
:param user: github user
:param password: github password
"""
if fname is sys.stdin:
content = fname.read()
fname = 'stdin'
else:
content = getcontent(fname)
url = "https://api.github.com/gists"
gist = {}
gist['description'] = fname
gist['public'] = public
gist['files'] = {fname: {'content': content}}
data = json.dumps(gist)
if user is not None and password is not None:
req = urllib2.Request(url, data=data, headers = {
'Authorization': bauth(user, password),
'Content-Type': 'application/json',
'Accept': '*/*',
'User-Agent': 'gistipy/1'})
print "Posting gist as %s" % user
else:
req = urllib2.Request(url, data=data, headers = {
'Content-Type': 'application/json',
'Accept': '*/*',
'User-Agent': 'gistipy/1'})
print "Posting anonymous gist..."
try:
f = urllib2.urlopen(req)
response = f.read()
f.close()
parse_post(response)
except Exception as err:
print "Error posting gist: %s" % err
# mikele: I modified this so that I can pass stuff directly from console
# In [1]: from gisty import *
# In [2]: gist_list("magicrebirth", "zabiz99")
# https://gist.github.com/3808121 - [u'new_gist.py'] 'Python: take input from stdin and create a new public gist with it'
# https://gist.github.com/3807911 - [u'base.html'] 'Html: base'
# https://gist.github.com/3096854 - [u'Tractatus-rgraph.js'] 'RGraph for Tractatus visualization'
# https://gist.github.com/9c0263e2e5e07e6eddb0 - [u'Scheme load rss into Impromptu 2 ', u'Scheme: load rss into Impromptu 2 ', u'Scheme.scm'] 'Scheme: load rss into Impromptu'
# In [3]: gist_post_console("test.py", "this snippett does.....", define (x)", True, "magicrebirth", "zabiz99")
# Posting gist as magicrebirth
# Posted to https://gist.github.com/3808191
# Git pull: git://gist.github.com/3808191.git
# Git push: [email protected]:3808191.git
def gist_post_console(fname, description, content, public=True, user=None, password=None):
"""
    Post an anonymous, public or private gist to github.
{
"description": "the description for this gist",
"public": true,
"files": {
"file1.txt": {
"content": "String file contents"
}
}
}
:param fname: The gist filename.
:param description: The gist description.
:param content: The gist content.
:param public: Whether this gist is public (True) or private (False)
:param user: github user
:param password: github password
"""
# if fname is sys.stdin:
# content = fname.read()
# fname = 'stdin'
# else:
# content = getcontent(fname)
url = "https://api.github.com/gists"
gist = {}
gist['description'] = description
gist['public'] = public
gist['files'] = {fname: {'content': content}}
data = json.dumps(gist)
if user is not None and password is not None:
req = urllib2.Request(url, data=data, headers = {
'Authorization': bauth(user, password),
'Content-Type': 'application/json',
'Accept': '*/*',
'User-Agent': 'gistipy/1'})
print "Posting gist as %s" % user
else:
req = urllib2.Request(url, data=data, headers = {
'Content-Type': 'application/json',
'Accept': '*/*',
'User-Agent': 'gistipy/1'})
print "Posting anonymous gist..."
try:
f = urllib2.urlopen(req)
response = f.read()
f.close()
parse_post(response)
except Exception as err:
print "Error posting gist: %s" % err
def get_gh_user():
cmd = ['git', 'config', '--get', 'github.user']
run = subprocess.Popen(cmd, stdout=subprocess.PIPE)
user = run.stdout.readline().strip()
if user:
return user
else:
try:
return os.environ['GITHUB_USER']
except KeyError:
return None
def get_gh_pass():
cmd = ['git', 'config', '--get', 'github.password']
run = subprocess.Popen(cmd, stdout=subprocess.PIPE)
password = run.stdout.readline().strip()
if password:
return password
else:
try:
return os.environ['GITHUB_PASSWORD']
except KeyError:
return None
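# Illustrative setup for the two helpers above (commands assumed, not
# documented in this file):
#   git config --global github.user     YOURUSER
#   git config --global github.password YOURPASS
# or export GITHUB_USER / GITHUB_PASSWORD in the environment as a fallback.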
def main():
print "=" * 79
usage = '''
usage: %prog [-v] [-p|--private] [-l|--list] [gist.file]
'''
args = optparse.OptionParser(usage)
args.add_option('--verbose', '-v', action="store_true",
help="Print verbose info")
args.add_option('--private', '-p', action="store_true",
help="post private gist")
args.add_option('--anonymous', '-a', action="store_true",
help="post an anonymous git")
args.add_option('--list', '-l', action="store_true",
help="list gists, use with -a to list only your public gists")
options, arguments = args.parse_args()
if options.private:
public = False
else:
public = True
if not options.anonymous:
ghuser = get_gh_user()
ghpass = get_gh_pass()
if not ghuser:
ghuser = raw_input("Enter your github username: ")
            if len(ghuser) == 0:
print "Aborting..."
sys.exit(1)
if not ghpass:
ghpass = getpass.getpass("Enter password for %[email protected]: " % \
ghuser)
else:
ghuser = None
ghpass = None
if options.list:
ghuser = get_gh_user()
gist_list(user=ghuser, password=ghpass)
sys.exit(0)
if not arguments:
gist_post(sys.stdin, public=public, user=ghuser, password=ghpass)
else:
for fname in arguments:
if os.path.isfile(fname):
gist_post(fname, public=public, user=ghuser, password=ghpass)
else:
cmd = '%s ./%s' % (os.environ.get('EDITOR', 'vim'), fname)
os.system(cmd)
if os.path.isfile(fname):
sendit = raw_input("Post %s as gist? (y/n)[y]: " % fname)
if sendit == "y" or sendit == "yes" or len(sendit) is 0:
gist_post(fname, public=public,
user=ghuser, password=ghpass)
else:
print "Did not post %s as gist." % fname
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print '\n' | [
"[email protected]"
] | |
24e1af42b6cf8518e8c366fe186394139c53ee9a | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /2f9vjBiynkBtF3TBi_2.py | 78016bb10def75086de7b057fd4c4c5f57730f40 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,053 | py | """
In this challenge, you must verify the equality of two different values given
the parameters `a` and `b`.
Both the _value_ and _type_ of the parameters need to be equal. The possible
types of the given parameters are:
* Numbers
* Strings
* Booleans (`False` or `True`)
* Special values: `None`
What have you learned so far that will permit you to do two different checks
(value **and** type) with a single statement?
Implement a function that returns `True` if the parameters are equal, and
`False` if they are not.
### Examples
check_equality(1, true) ➞ False
# A number and a boolean: the value and type are different.
check_equality(0, "0") ➞ False
# A number and a string: the type is different.
check_equality(1, 1) ➞ True
# A number and a number: the type and value are equal.
### Notes
* If you get stuck on a challenge, find help in the **Resources** tab.
* If you're _really_ stuck, unlock solutions in the **Solutions** tab.
"""
def check_equality(a, b):
    return type(a) == type(b) and a == b  # identity ("is") would fail e.g. for equal floats
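# Illustrative behaviour (mirrors the docstring examples above; added for clarity):
#   check_equality(1, True)  ->  False   # equal value, different type
#   check_equality(0, "0")   ->  False   # different type
#   check_equality(1, 1)     ->  True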
| [
"[email protected]"
] | |
32f75d1ab64a5f215393d6f5c1848e0138edc409 | eb5fa3bdbad17674e95b360694e6d794387a557c | /menuhin/middleware.py | 2f5bf781bf9e2cac82946707eb46785c3e434752 | [
"BSD-2-Clause-Views",
"BSD-2-Clause"
] | permissive | kezabelle/django-menuhin | 0648bc923fe159612846282a526f2c60d0f535a9 | b9c4111eed04e241c29ca8ec95c8a3ffeaac48da | refs/heads/master | 2021-05-02T02:00:44.896065 | 2014-12-19T10:00:50 | 2014-12-19T10:00:50 | 14,557,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,201 | py | import logging
from django.utils.functional import SimpleLazyObject
from django.conf import settings
from django.core.urlresolvers import reverse, NoReverseMatch
from .models import MenuItem
from .utils import (LengthLazyObject, get_menuitem_or_none,
get_relations_for_request)
logger = logging.getLogger(__name__)
class RequestTreeMiddleware(object):
def get_ignorables(self):
if hasattr(settings, 'STATIC_URL') and settings.STATIC_URL:
yield settings.STATIC_URL
if hasattr(settings, 'MEDIA_URL') and settings.MEDIA_URL:
yield settings.MEDIA_URL
try:
yield reverse('admin:index')
except NoReverseMatch: # pragma: no cover
logger.debug("Admin is not mounted")
def process_request(self, request):
ignored_prefixes = tuple(self.get_ignorables())
if request.path.startswith(ignored_prefixes):
logger.debug("Skipping this request")
return None
def lazy_menuitem():
return get_menuitem_or_none(MenuItem, request.path)
def lazy_ancestors_func():
return get_relations_for_request(
model=MenuItem, request=request,
relation='get_ancestors').relations
def lazy_descendants_func():
return get_relations_for_request(
model=MenuItem, request=request,
relation='get_descendants').relations
def lazy_siblings_func():
return get_relations_for_request(
model=MenuItem, request=request,
relation='get_siblings').relations
def lazy_children_func():
return get_relations_for_request(
model=MenuItem, request=request,
relation='get_children').relations
request.menuitem = SimpleLazyObject(lazy_menuitem)
request.ancestors = LengthLazyObject(lazy_ancestors_func)
request.descendants = LengthLazyObject(lazy_descendants_func)
request.siblings = LengthLazyObject(lazy_siblings_func)
request.children = LengthLazyObject(lazy_children_func)
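# Usage sketch (assumption -- not part of this file): activate the middleware
# via Django settings, e.g.
#   MIDDLEWARE_CLASSES += ('menuhin.middleware.RequestTreeMiddleware',)
# after which views/templates can read the lazy attributes set above
# (request.menuitem, request.ancestors, request.descendants, request.siblings,
# request.children); no queries run until an attribute is actually evaluated.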
| [
"[email protected]"
] | |
da11b4cca48ae0c88eb3b8c66a041828ea0b021e | e38db85f6a13b32c60bf66d78838d6ed348f1798 | /healthplans/tests.py | 549fcf58df9886eecfa78cb4e957f908105e148f | [
"BSD-2-Clause"
] | permissive | westurner/health-marketplace | ab72d60d9469c9f3622bc64c391b222018c7c7a1 | 15f5379cc213e2e2b2150e967b56092ea8468db2 | refs/heads/master | 2020-12-28T21:28:09.415775 | 2013-11-09T17:55:08 | 2013-11-09T17:55:08 | 14,261,431 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,880 | py | """
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from healthplans.models import Provider
from healthplans.models import Plan
class HealtplansModelsTest(TestCase):
def test_011_provider_create(self):
provider = Provider.objects.create(
name="Test Provider")
self.assertIsNotNone(provider)
self.assertIsNotNone(provider.created_time)
self.assertIsNotNone(provider.updated_time)
def test_012_plan_create(self):
provider = Provider.objects.create(
name="Test Provider")
COST = 1.05
plan = Plan.objects.create(
provider=provider,
name="Test Plan",
category=Plan.PLATINUM,
base_rate=COST)
self.assertIsNotNone(plan)
self.assertIsNotNone(plan.created_time)
self.assertIsNotNone(plan.updated_time)
self.assertEqual(plan.base_rate, COST)
def test_021_provider_slug(self):
provider = Provider.objects.create(
name="Test Provider")
self.assertEqual(provider.slug, "Test-Provider")
def test_022_plan_slug(self):
provider = Provider.objects.create(
name="Test Provider")
COST = 1.05
plan = Plan.objects.create(
provider=provider,
name="Test Plan",
category=Plan.PLATINUM,
base_rate=COST)
self.assertIsNotNone(plan)
self.assertEqual(plan.slug, "Test-Plan")
from django.test import Client
class HealthplansViewsTest(TestCase):
def setUp(self):
self.client = Client()
def test_homepage_links(self):
response = self.client.get('/')
self.assertContains(response, 'href="/providers/"')
self.assertContains(response, 'href="/plans/"')
def test_provider_list(self):
provider = Provider.objects.create(
name="Test Provider") # TODO: fixtures
response = self.client.get('/providers/')
self.assertContains(response, provider.name)
self.assertContains(response, 'href="/providers/%s"' % provider.slug)
def test_provider_detail(self):
provider = Provider.objects.create(
name="Test Provider") # TODO: fixtures
response = self.client.get('/providers/%s' % provider.slug)
self.assertContains(response, provider.name)
self.assertContains(response, 'href="/providers/%s">' % provider.slug)
def test_plan_list(self):
provider = Provider.objects.create(
name="Test Provider") # TODO: fixtures
plan = Plan.objects.create(
provider=provider,
name="Test Plan",
category=Plan.PLATINUM,
base_rate=1.05)
response = self.client.get('/plans/')
self.assertContains(response, plan.name)
self.assertContains(response, 'href="/plans/%s"' % plan.slug)
def test_plan_detail(self):
provider = Provider.objects.create(
name="Test Provider") # TODO: fixtures
plan = Plan.objects.create(
provider=provider,
name="Test Plan",
category=Plan.PLATINUM,
base_rate=1.05)
response = self.client.get('/plans/%s' % plan.slug)
self.assertContains(response, plan.name)
self.assertContains(response, 'href="/plans/%s"' % plan.slug)
class HealthplansAdminTest(TestCase):
def setUp(self):
self.client = Client()
self._superuser_login()
def _superuser_login(self):
USERNAME = 'test'
EMAIL = '[email protected]'
PASSWORD = 'TODOTODOTODO'
from django.db import DEFAULT_DB_ALIAS as db
from django.contrib.auth.models import User
User.objects.db_manager(db).create_superuser(
USERNAME, EMAIL, PASSWORD)
logged_in = self.client.login(
username=USERNAME,
password=PASSWORD)
self.assertEqual(logged_in, True)
def test_provider_admin(self):
provider = Provider.objects.create(
name="Test Provider") # TODO: fixtures
response = self.client.get('/admin/healthplans/provider/')
self.assertContains(response, provider.name)
def test_plan_admin(self):
provider = Provider.objects.create(
name="Test Provider") # TODO: fixtures
plan = Plan.objects.create(
provider=provider,
name="Test Plan",
category=Plan.PLATINUM,
base_rate=1.05)
response = self.client.get('/admin/healthplans/plan/')
self.assertContains(response, plan.name)
| [
"[email protected]"
] | |
098c796afacb67484884c0be523186710e54f517 | 32e6e405bebc7c63ca0a1721512a9abe4c2ed7cb | /food_reference_listing/database/archive_loader/initialize_database.py | 86532879fe4e3fdf107fdcc2b05257eba61c3ebd | [
"MIT"
] | permissive | bfssi-forest-dussault/food_reference_listing | d034d853a966967a94b6a1e82beee442786f1e7f | 85372a81a9201dda02797ab0c11b1bd710f9b70d | refs/heads/master | 2023-01-03T22:44:13.388149 | 2020-10-29T18:40:18 | 2020-10-29T18:40:18 | 241,152,908 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,294 | py | import sys
import django
from pathlib import Path
# Need to do this in order to access the database models
sys.path.append("/app")
django.setup()
# For some bizarre reason I need to import all functions, rather than have them be defined in this file vOv
from food_reference_listing.database.archive_loader.helpers import *
"""
Script to load the archived data into the new database.
Call it using this command:
docker-compose -f local.yml run --rm django python manage.py shell < food_reference_listing/database/archive_loader/initialize_database.py
"""
# Script starts here
data_dir = Path('/app/food_reference_listing/database/archive_data')
assert data_dir.is_dir()
print(f'Confirmed data is available in {data_dir}')
# Keys correspond 1:1 with database.Model names
table_dict = {
'Acronym': data_dir / 'Acronyms.csv',
'AcronymType': data_dir / 'AcronymTypes.csv',
'Category': data_dir / 'Categories.csv',
'Country': data_dir / 'Countries.csv',
'ProvinceState': data_dir / 'ProvinceStates.csv',
'City': data_dir / 'Cities.csv',
'Company': data_dir / 'Companies.csv',
'Subcategory': data_dir / 'SubCategories.csv',
'Product': data_dir / 'Products_Combined.csv', # Combination of Products.csv and Final Web Update.csv
'Language': data_dir / 'Languages.csv',
}
print(f'Confirming expected source data files exist')
# Make sure our data files exist
for model, src in table_dict.items():
assert src.exists()
print(f'Deleting all entries in existing database')
# Cleanup while we debug
delete_all_rows_in_all_tables()
# Start populating tables
print(f'Recreating database with data files from {data_dir}')
populate_language_table(data=read_csv(table_dict['Language']))
populate_acronym_type_table(data=read_csv(table_dict['AcronymType']))
populate_acronym_table(data=read_csv(table_dict['Acronym']))
populate_category_table(data=read_csv(table_dict['Category']))
populate_country_table(data=read_csv(table_dict['Country']))
populate_provincestate_table(data=read_csv(table_dict['ProvinceState']))
populate_city_table(data=read_csv(table_dict['City']))
populate_company_table(data=read_csv(table_dict['Company']))
populate_subcategory_table(data=read_csv(table_dict['Subcategory']))
populate_product_table(data=read_csv(table_dict['Product']))
| [
"[email protected]"
] | |
0e73a9a57c08e990f0dd90913c0a96dfe81fc02a | 6f7032e954334d102a9e1eff8f420f0a8b7ee70a | /pytestpackage/test_conftest_demo1.py | 2820a6af2e706f8e8ff16ee55e6d55b40b53b7c1 | [] | no_license | suchismitarout/selenium_practice | 186596e14ed66550ef184703a04aa323faad9f45 | 2281e6a3a526f2ff5c1c566517dc85b4ae23b85b | refs/heads/master | 2022-12-26T12:16:17.297471 | 2020-10-06T06:21:04 | 2020-10-06T06:21:04 | 301,632,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | import pytest
def test_demo_conf1(onetimesetUp):
print("This is conftest demo")
def test_demo_conf2(onetimesetUp):
print("This is second conftest demo") | [
"[email protected]"
] | |
d16c9b91a5c897f61fb9bd659fd74a37ceeb379c | 5995b039f039accf17538283a51668be929aeaea | /red_mind/red_mind/wsgi.py | 6eb39eb6bbcbd2a0289055e40fd2dc5560db6e82 | [] | no_license | harshitdixit69/red_mind_heart | 0c9bed294a8e89b05ecdd93590da25f79139ad6c | 8693ddef44e949219eab6670a3b1b862646f3dd8 | refs/heads/main | 2022-12-29T23:17:51.061114 | 2020-10-14T21:12:21 | 2020-10-14T21:12:21 | 304,288,555 | 0 | 0 | null | 2020-10-15T10:21:03 | 2020-10-15T10:21:02 | null | UTF-8 | Python | false | false | 393 | py | """
WSGI config for red_mind project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'red_mind.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
d271a9cf8126ae56eb7272193959b38f7de60c87 | 7463a66dfa00572f4e4d8ef4349309531f0105ae | /ServerDL/cfgs/configfiles.py | 3ba6e38d4d33dfb1119a53e1b5ed363987540186 | [] | no_license | fx19940824/DetectionModel | f2e380fd21f4b31a17fd175a6dea1067b8f0d5cc | edc0d2f9eea481d2bc6f3abb2f222b59fdc25538 | refs/heads/master | 2022-12-20T19:58:32.224829 | 2019-05-30T01:16:05 | 2019-05-30T01:16:05 | 188,800,679 | 2 | 0 | null | 2022-11-22T02:39:23 | 2019-05-27T08:13:38 | Python | UTF-8 | Python | false | false | 2,983 | py | from ServerDL.apis.transformers import *
from ServerDL.apis.postprocesses import *
from Algorithm.classifications.utils.model_factory import classification
from Algorithm.segmentation.deeplabv3plus.modeling.deeplab import DeepLab
model_register_table = dict()
model_register_table["maskrcnn_box"] = {
"model_weight_path": "/media/cobot/30b0f4a0-3376-4f8f-b458-9c6857504361/Projects/bag/resnext101/model_0660000.pth", # 选填,会覆盖model_cfg_string中WEIGHT的路径
"model_cfg_file":"/media/cobot/30b0f4a0-3376-4f8f-b458-9c6857504361/Projects/bag/resnext101/n_rcnn.yaml",
"model_type": "RCNN",
"model_transformer": build_transform_maskrcnn,
"model_handle_function": build_postprocess_plgdetection,
"model_network": "",
"confidence_threshold": 0.9
}
model_register_table["maskrcnn_bag"] = {
"model_weight_path": "/media/cobot/30b0f4a0-3376-4f8f-b458-9c6857504361/Projects/bag/resnext101/model_0660000.pth", # 选填,会覆盖model_cfg_string中WEIGHT的路径
"model_cfg_file":"/media/cobot/30b0f4a0-3376-4f8f-b458-9c6857504361/Projects/bag/resnext101/n_rcnn.yaml",
"model_type": "RCNN",
"model_transformer": build_transform_maskrcnn,
"model_handle_function": build_postprocess_plgdetection,
"model_network": "",
"confidence_threshold": 0.7
}
model_register_table["maskrcnn_tube"] = {
"model_weight_path": "/home/cobot/tube_model/model_0505000.pth",
"model_cfg_file": "/home/cobot/tube_model/n_rcnn.yaml",
"model_type": "RCNN",
"model_transformer": build_transform_maskrcnn,
"model_handle_function": build_postprocess_plgdetection,
"model_network": "",
"confidence_threshold": 0.8
}
model_register_table["CLS"] = {
"model_cfg_string":
'''
modelname = resnet18
classes = 2
img_size = 224
lr = 0.001
batchsize = 32
epochs = 10
freeze_layers = 0
is_Train = True
transformer = default
half = True
''',
"model_weight_path": "/home/cobot/caid2.0/python/Main/ServerDL/weights/test.pt",
"model_type": "CLS",
"model_transformer": build_transform_cls,
"model_handle_function": build_postprocess_cls,
"model_network": classification
}
model_register_table["DeepLabv3+"] = {
"model_cfg_string":
'''
num_classes = 2
backbone = mobilenet
output_stride = 16
sync_bn = False
freeze_bn = False
img_size = 257
''',
"model_weight_path": "/home/cobot/model_best.pth.tar",
"model_type": "SEG",
"model_transformer": build_transform_seg,
"model_handle_function": build_postprocess_seg,
"model_network": DeepLab
}
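# Registering a further model follows the same shape; everything below is
# illustrative (no such model ships with this file):
# model_register_table["my_classifier"] = {
#     "model_cfg_string": '''
#         modelname = resnet50
#         classes = 4
#         img_size = 224
#         is_Train = False
#         transformer = default
#     ''',
#     "model_weight_path": "/path/to/weights.pt",
#     "model_type": "CLS",
#     "model_transformer": build_transform_cls,
#     "model_handle_function": build_postprocess_cls,
#     "model_network": classification,
# }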
| [
"[email protected]"
] | |
8196a02855f2ae340915ff324e2199e99211f8a6 | 3eadf176c95f0c924af53770441afc0797d7c6ff | /memberships/admin.py | ccbd60dec107d29cc05318f498a7790d51b957e8 | [
"MIT"
] | permissive | Maneesh9063/video-membership | d144d559fe721e4a077659d610e6c0e8c35062d6 | c241310a6a4e3b7ee831b1d65ae8afaef7d3076a | refs/heads/master | 2022-12-08T23:58:37.176632 | 2020-09-04T15:41:40 | 2020-09-04T15:41:40 | 292,874,425 | 0 | 0 | MIT | 2020-09-04T14:52:54 | 2020-09-04T14:52:53 | null | UTF-8 | Python | false | false | 227 | py | from django.contrib import admin
# Register your models here.
from .models import Membership, UserMembership, Subscription
admin.site.register(Membership)
admin.site.register(UserMembership)
admin.site.register(Subscription)
| [
"[email protected]"
] | |
eb4dea118069aa919f4783f664f547821c76ec42 | ae91e7f7e80f891a8116cdaba5b52b1b3175f7c3 | /tests/hwsim/test_ap_tdls.py | c4d6dbc2f137a30970a572ce685454db44de2b23 | [
"BSD-3-Clause"
] | permissive | AlejandroAbad/hostap | fda648e9f874be650193519dd552f8f31fdd210d | 60400d5e1f21f4a1357185d4c8e3a4f781375e25 | refs/heads/master | 2020-06-04T20:27:33.386538 | 2014-04-02T18:25:05 | 2014-04-02T18:25:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,893 | py | # TDLS tests
# Copyright (c) 2013, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import time
import logging
logger = logging.getLogger()
import hwsim_utils
from hostapd import HostapdGlobal
from hostapd import Hostapd
import hostapd
from wlantest import Wlantest
def start_ap_wpa2_psk(ifname):
params = hostapd.wpa2_params(ssid="test-wpa2-psk", passphrase="12345678")
hostapd.add_ap(ifname, params)
def connectivity(dev, ap_ifname):
hwsim_utils.test_connectivity_sta(dev[0], dev[1])
hwsim_utils.test_connectivity(dev[0].ifname, ap_ifname)
hwsim_utils.test_connectivity(dev[1].ifname, ap_ifname)
def connect_2sta(dev, ssid, ap_ifname):
dev[0].connect(ssid, psk="12345678", scan_freq="2412")
dev[1].connect(ssid, psk="12345678", scan_freq="2412")
connectivity(dev, ap_ifname)
def connect_2sta_wpa2_psk(dev, ap_ifname):
connect_2sta(dev, "test-wpa2-psk", ap_ifname)
def connect_2sta_wpa_psk(dev, ap_ifname):
connect_2sta(dev, "test-wpa-psk", ap_ifname)
def connect_2sta_wpa_psk_mixed(dev, ap_ifname):
dev[0].connect("test-wpa-mixed-psk", psk="12345678", proto="WPA",
scan_freq="2412")
dev[1].connect("test-wpa-mixed-psk", psk="12345678", proto="WPA2",
scan_freq="2412")
connectivity(dev, ap_ifname)
def connect_2sta_wep(dev, ap_ifname):
dev[0].connect("test-wep", key_mgmt="NONE", wep_key0='"hello"',
scan_freq="2412")
dev[1].connect("test-wep", key_mgmt="NONE", wep_key0='"hello"',
scan_freq="2412")
connectivity(dev, ap_ifname)
def connect_2sta_open(dev, ap_ifname):
dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2412")
dev[1].connect("test-open", key_mgmt="NONE", scan_freq="2412")
connectivity(dev, ap_ifname)
def wlantest_setup():
wt = Wlantest()
wt.flush()
wt.add_passphrase("12345678")
wt.add_wepkey("68656c6c6f")
def wlantest_tdls_packet_counters(bssid, addr0, addr1):
wt = Wlantest()
dl = wt.get_tdls_counter("valid_direct_link", bssid, addr0, addr1)
inv_dl = wt.get_tdls_counter("invalid_direct_link", bssid, addr0, addr1)
ap = wt.get_tdls_counter("valid_ap_path", bssid, addr0, addr1)
inv_ap = wt.get_tdls_counter("invalid_ap_path", bssid, addr0, addr1)
return [dl,inv_dl,ap,inv_ap]
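# Explanatory note (added): the four counters split traffic by path -- the
# TDLS direct link vs. relay through the AP -- and, roughly, by whether the
# frames were protected/addressed as expected ("valid") or not ("invalid").
# tdls_check_dl()/tdls_check_ap() below assert the combination each test
# expects.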
def tdls_check_dl(sta0, sta1, bssid, addr0, addr1):
wt = Wlantest()
wt.tdls_clear(bssid, addr0, addr1)
hwsim_utils.test_connectivity_sta(sta0, sta1)
[dl,inv_dl,ap,inv_ap] = wlantest_tdls_packet_counters(bssid, addr0, addr1)
if dl == 0:
raise Exception("No valid frames through direct link")
if inv_dl > 0:
raise Exception("Invalid frames through direct link")
if ap > 0:
raise Exception("Unexpected frames through AP path")
if inv_ap > 0:
raise Exception("Invalid frames through AP path")
def tdls_check_ap(sta0, sta1, bssid, addr0, addr1):
wt = Wlantest()
wt.tdls_clear(bssid, addr0, addr1);
hwsim_utils.test_connectivity_sta(sta0, sta1)
[dl,inv_dl,ap,inv_ap] = wlantest_tdls_packet_counters(bssid, addr0, addr1)
if dl > 0:
raise Exception("Unexpected frames through direct link")
if inv_dl > 0:
raise Exception("Invalid frames through direct link")
if ap == 0:
raise Exception("No valid frames through AP path")
if inv_ap > 0:
raise Exception("Invalid frames through AP path")
def check_connectivity(sta0, sta1, ap):
hwsim_utils.test_connectivity_sta(sta0, sta1)
hwsim_utils.test_connectivity(sta0.ifname, ap['ifname'])
hwsim_utils.test_connectivity(sta1.ifname, ap['ifname'])
def setup_tdls(sta0, sta1, ap, reverse=False, expect_fail=False):
logger.info("Setup TDLS")
check_connectivity(sta0, sta1, ap)
bssid = ap['bssid']
addr0 = sta0.p2p_interface_addr()
addr1 = sta1.p2p_interface_addr()
wt = Wlantest()
wt.tdls_clear(bssid, addr0, addr1);
wt.tdls_clear(bssid, addr1, addr0);
sta0.tdls_setup(addr1)
time.sleep(1)
if expect_fail:
tdls_check_ap(sta0, sta1, bssid, addr0, addr1)
return
if reverse:
addr1 = sta0.p2p_interface_addr()
addr0 = sta1.p2p_interface_addr()
conf = wt.get_tdls_counter("setup_conf_ok", bssid, addr0, addr1);
if conf == 0:
raise Exception("No TDLS Setup Confirm (success) seen")
tdls_check_dl(sta0, sta1, bssid, addr0, addr1)
check_connectivity(sta0, sta1, ap)
def teardown_tdls(sta0, sta1, ap):
logger.info("Teardown TDLS")
check_connectivity(sta0, sta1, ap)
bssid = ap['bssid']
addr0 = sta0.p2p_interface_addr()
addr1 = sta1.p2p_interface_addr()
sta0.tdls_teardown(addr1)
time.sleep(1)
wt = Wlantest()
teardown = wt.get_tdls_counter("teardown", bssid, addr0, addr1);
if teardown == 0:
raise Exception("No TDLS Setup Teardown seen")
tdls_check_ap(sta0, sta1, bssid, addr0, addr1)
check_connectivity(sta0, sta1, ap)
def test_ap_tdls_discovery(dev, apdev):
"""WPA2-PSK AP and two stations using TDLS discovery"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[0].request("TDLS_DISCOVER " + dev[1].p2p_interface_addr())
time.sleep(0.2)
def test_ap_wpa2_tdls(dev, apdev):
"""WPA2-PSK AP and two stations using TDLS"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
setup_tdls(dev[0], dev[1], apdev[0])
teardown_tdls(dev[0], dev[1], apdev[0])
setup_tdls(dev[1], dev[0], apdev[0])
#teardown_tdls(dev[0], dev[1], apdev[0])
def test_ap_wpa2_tdls_concurrent_init(dev, apdev):
"""Concurrent TDLS setup initiation"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[0].request("SET tdls_testing 0x80")
setup_tdls(dev[1], dev[0], apdev[0], reverse=True)
def test_ap_wpa2_tdls_concurrent_init2(dev, apdev):
"""Concurrent TDLS setup initiation (reverse)"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[1].request("SET tdls_testing 0x80")
setup_tdls(dev[0], dev[1], apdev[0])
def test_ap_wpa2_tdls_decline_resp(dev, apdev):
"""Decline TDLS Setup Response"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[1].request("SET tdls_testing 0x200")
setup_tdls(dev[1], dev[0], apdev[0], expect_fail=True)
def test_ap_wpa2_tdls_long_lifetime(dev, apdev):
"""TDLS with long TPK lifetime"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[1].request("SET tdls_testing 0x40")
setup_tdls(dev[1], dev[0], apdev[0])
def test_ap_wpa2_tdls_long_frame(dev, apdev):
"""TDLS with long setup/teardown frames"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[0].request("SET tdls_testing 0x1")
dev[1].request("SET tdls_testing 0x1")
setup_tdls(dev[1], dev[0], apdev[0])
teardown_tdls(dev[1], dev[0], apdev[0])
setup_tdls(dev[0], dev[1], apdev[0])
def test_ap_wpa2_tdls_reneg(dev, apdev):
"""Renegotiate TDLS link"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
setup_tdls(dev[1], dev[0], apdev[0])
setup_tdls(dev[0], dev[1], apdev[0])
def test_ap_wpa2_tdls_wrong_lifetime_resp(dev, apdev):
"""Incorrect TPK lifetime in TDLS Setup Response"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[1].request("SET tdls_testing 0x10")
setup_tdls(dev[0], dev[1], apdev[0], expect_fail=True)
def test_ap_wpa2_tdls_diff_rsnie(dev, apdev):
"""TDLS with different RSN IEs"""
start_ap_wpa2_psk(apdev[0]['ifname'])
wlantest_setup()
connect_2sta_wpa2_psk(dev, apdev[0]['ifname'])
dev[1].request("SET tdls_testing 0x2")
setup_tdls(dev[1], dev[0], apdev[0])
teardown_tdls(dev[1], dev[0], apdev[0])
def test_ap_wpa_tdls(dev, apdev):
"""WPA-PSK AP and two stations using TDLS"""
hostapd.add_ap(apdev[0]['ifname'],
hostapd.wpa_params(ssid="test-wpa-psk",
passphrase="12345678"))
wlantest_setup()
connect_2sta_wpa_psk(dev, apdev[0]['ifname'])
setup_tdls(dev[0], dev[1], apdev[0])
teardown_tdls(dev[0], dev[1], apdev[0])
setup_tdls(dev[1], dev[0], apdev[0])
def test_ap_wpa_mixed_tdls(dev, apdev):
"""WPA+WPA2-PSK AP and two stations using TDLS"""
hostapd.add_ap(apdev[0]['ifname'],
hostapd.wpa_mixed_params(ssid="test-wpa-mixed-psk",
passphrase="12345678"))
wlantest_setup()
connect_2sta_wpa_psk_mixed(dev, apdev[0]['ifname'])
setup_tdls(dev[0], dev[1], apdev[0])
teardown_tdls(dev[0], dev[1], apdev[0])
setup_tdls(dev[1], dev[0], apdev[0])
def test_ap_wep_tdls(dev, apdev):
"""WEP AP and two stations using TDLS"""
hostapd.add_ap(apdev[0]['ifname'],
{ "ssid": "test-wep", "wep_key0": '"hello"' })
wlantest_setup()
connect_2sta_wep(dev, apdev[0]['ifname'])
setup_tdls(dev[0], dev[1], apdev[0])
teardown_tdls(dev[0], dev[1], apdev[0])
setup_tdls(dev[1], dev[0], apdev[0])
def test_ap_open_tdls(dev, apdev):
"""Open AP and two stations using TDLS"""
hostapd.add_ap(apdev[0]['ifname'], { "ssid": "test-open" })
wlantest_setup()
connect_2sta_open(dev, apdev[0]['ifname'])
setup_tdls(dev[0], dev[1], apdev[0])
teardown_tdls(dev[0], dev[1], apdev[0])
setup_tdls(dev[1], dev[0], apdev[0])
| [
"[email protected]"
] | |
2b1638999ca31a03dac9b7ae400bf86815b43735 | 0e8dd5901b1f98934c44a85b133eb7ca6f44b4b9 | /osr2mp4/Parser/osrparser.py | 398325f8b59fc702a5af1c87a8170c270acdc087 | [] | no_license | Hazuki-san/osr2mp4-core | dbd2f4d44a3d0e90974214c97b434dcbb2eedd18 | 83dc5c47bc73dcb0b4d4b6a5ae1924771c13c623 | refs/heads/master | 2022-11-24T13:41:15.703261 | 2020-07-03T14:00:54 | 2020-07-03T14:00:54 | 279,099,127 | 1 | 0 | null | 2020-07-12T16:02:35 | 2020-07-12T16:02:34 | null | UTF-8 | Python | false | false | 2,041 | py | import osrparse
# index for replay_event
from ..CheckSystem.Judgement import DiffCalculator
# noinspection PyTypeChecker
from ..EEnum.EReplay import Replays
def setupReplay(osrfile, beatmap):
replay_info = osrparse.parse_replay_file(osrfile)
replay_data = [None] * len(replay_info.play_data)
start_time = beatmap.start_time
total_time = 0
start_index = 1
start_osr = start_time - 3000
for index in range(len(replay_data)):
times = replay_info.play_data[index].time_since_previous_action
total_time += times
# if total_time >= end_osr:
# break
# end_index += 1
if total_time < start_osr:
			start_index += 1  # to crop later, everything before we can ignore
continue
replay_data[index] = [None, None, None, None]
replay_data[index][Replays.CURSOR_X] = replay_info.play_data[index].x
replay_data[index][Replays.CURSOR_Y] = replay_info.play_data[index].y
replay_data[index][Replays.KEYS_PRESSED] = replay_info.play_data[index].keys_pressed
replay_data[index][Replays.TIMES] = total_time
replay_data = replay_data[start_index:-1]
replay_data.sort(key=lambda x: x[Replays.TIMES]) # sort replay data based on time
start_time = replay_data[0][Replays.TIMES]
for x in range(10):
replay_data.append([replay_data[-1][Replays.CURSOR_X], replay_data[-1][Replays.CURSOR_Y], 0, max(replay_data[-1][Replays.TIMES], int(beatmap.end_time + 1000) + 17 * x)])
diffcalculator = DiffCalculator(beatmap.diff)
timepreempt = diffcalculator.ar()
if replay_data[0][Replays.TIMES] > beatmap.hitobjects[0]["time"] - timepreempt - 2000:
startdata = replay_data[0].copy()
startdata[Replays.TIMES] = beatmap.hitobjects[0]["time"] - timepreempt - 2000
replay_data.insert(0, startdata)
replay_data.append([0, 0, 0, replay_data[-1][3] * 5])
replay_data.append([0, 0, 0, replay_data[-1][3] * 5])
start_time = replay_data[0][Replays.TIMES]
beatmap.breakperiods.append({"Start": int(beatmap.end_time + 200), "End": replay_data[-1][Replays.TIMES] + 100, "Arrow": False})
return replay_data, start_time
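# Usage sketch (inferred from the signature; not part of this file):
#   replay_data, start_time = setupReplay("some_replay.osr", beatmap)
# where `beatmap` supplies start_time, end_time, diff, hitobjects and
# breakperiods, and each replay_data row is [x, y, keys_pressed, time],
# indexed via the Replays enum used above.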
| [
"[email protected]"
] | |
3ec57b2531ab6a937124feee52ac1b6da03020ff | b5ef3b9da130f604f111bd469128b73e78d6ba9d | /bt5/erp5_accounting/SkinTemplateItem/portal_skins/erp5_accounting/AccountingTransactionLine_getProjectItemList.py | 43b31ee9f20c5d1e877c6974693c15afe6a456ef | [] | no_license | soediro/erp5 | 154bb2057c4cd12c14018c1ab2a09a78b2d2386a | 3d1a8811007a363b7a43df4b295b5e0965c2d125 | refs/heads/master | 2021-01-11T00:31:05.445267 | 2016-10-05T09:28:05 | 2016-10-07T02:59:00 | 70,526,968 | 1 | 0 | null | 2016-10-10T20:40:41 | 2016-10-10T20:40:40 | null | UTF-8 | Python | false | false | 1,341 | py | """Returns all validated projects.
This script is indented to be used on custom listfields for accounting lines, and on reports.
If this script returns an empty list, it means that reports by project are disabled.
"""
from Products.ERP5Type.Message import translateString
portal = context.getPortalObject()
# case 1: script is used for reports, we display all validated projects.
if context.getPortalType() == 'Accounting Transaction Module':
project_list = []
for project in portal.portal_catalog(
portal_type='Project',
select_list=['relative_url', 'title', 'reference'],
validation_state=('validated',),
sort_on=(('title', 'ASC'),)):
if project.reference:
project_list.append(('%s - %s' % (project.reference, project.title), project.relative_url,))
else:
project_list.append((project.title, project.relative_url,))
if not project_list:
return [] # returning an empty list, not to add project column on reports
return [('', ''), (translateString('No Project'), 'None')] + project_list
# case 2: script is used on custom listfields.
# for now the script has to be customized in such case.
# [(x.getTitle(), x.getRelativeUrl()) for x in context.project_module.searchFolder()]
return [('', '')]
| [
"[email protected]"
] | |
2309594889f7eaf0567ecb3881e41ae564d04ca9 | 22fa0db584e41a9f06d829a5dd06b32bcdeb3646 | /registration/mobile_backend.py | dfb3c3b608616dec696f7b31dceed277b810a84c | [] | no_license | muthuraj-python/muthuinterview | 186ecd4efd173fa0fe7ce25e721ebadbeabc94f7 | d51b03f90fea9abd6aa763cf666dd39ead7a4753 | refs/heads/master | 2021-02-22T07:24:10.319876 | 2020-03-15T13:37:16 | 2020-03-15T13:37:16 | 245,371,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
class MobileBackend(ModelBackend):
def authenticate(self, request, **kwargs):
UserModel = get_user_model()
try:
user = UserModel.objects.get(mobile_number=kwargs.get('username'))
except UserModel.DoesNotExist:
return None
else:
if user.check_password(kwargs.get('password')):
return user
return None
def get_user(self, user_id):
UserModel = get_user_model()
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
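# Wiring sketch (assumption -- not part of this file): list the backend in
# settings.py so Django consults it, e.g.
#   AUTHENTICATION_BACKENDS = [
#       'registration.mobile_backend.MobileBackend',
#       'django.contrib.auth.backends.ModelBackend',
#   ]
# authenticate(username=<mobile number>, password=...) then matches on
# mobile_number first and falls through to the default backend on failure.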
| [
"[email protected]"
] | |
64cd0072445b206c8f730510ee8bb8e3f9fedfcd | 7765acf96f3c334a073f647ead1e6c862046fd41 | /tex/figures/photometry_nullspace.py | 6cb74974a958ef681cf8425500f031c951b3c321 | [
"MIT"
] | permissive | rodluger/fishy | da752a2e7aab159b10d24eb0433251b8485d6983 | 94bb393b01ade283e416589ea013d604166749e3 | refs/heads/master | 2020-05-06T15:15:44.725979 | 2019-07-08T13:19:04 | 2019-07-08T13:19:04 | 180,179,751 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,408 | py | # -*- coding: utf-8 -*-
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import starry
ydeg = 10
theta = np.linspace(-180, 180, 1000)
s = np.zeros(((ydeg + 1) ** 2, len(theta)))
map = starry.Map(ydeg, lazy=False)
n = 0
for l in range(ydeg + 1):
for m in range(-l, l + 1):
map.reset()
if l > 0:
map[l, m] = 1.0
s[n] = map.flux(theta=theta)
n += 1
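# Interpretation note (added; not in the original script): any mode whose
# flux row in `s` is constant over theta produces no rotational light-curve
# signal -- those harmonics make up the photometric null space the figure
# illustrates.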
# Set up the plot
fig, ax = plt.subplots(ydeg + 1, 2 * ydeg + 1, figsize=(16, 10),
sharex=True, sharey=True)
fig.subplots_adjust(hspace=0)
for axis in ax.flatten():
axis.spines['top'].set_visible(False)
axis.spines['right'].set_visible(False)
axis.spines['bottom'].set_visible(False)
axis.spines['left'].set_visible(False)
axis.set_xticks([])
axis.set_yticks([])
# Loop over the orders and degrees
n = 0
for i, l in enumerate(range(ydeg + 1)):
for j, m in enumerate(range(-l, l + 1)):
j += ydeg - l
ax[i, j].plot(s[n])
n += 1
# Labels
for j, m in enumerate(range(-ydeg, ydeg + 1)):
ax[-1, j].set_xlabel("%d" % m, fontsize=14, fontweight="bold", alpha=0.5)
for i, l in enumerate(range(ydeg + 1)):
ax[i, ydeg - l].set_ylabel("%d" % l, fontsize=14, fontweight="bold",
rotation=45, labelpad=20, alpha=0.5)
# Save
fig.savefig("photometry_nullspace.pdf", bbox_inches="tight") | [
"[email protected]"
] | |
3bd32d35fa515727b1d2fdf19b6da268ce008399 | 5a42ce780721294d113335712d45c62a88725109 | /project/graphdata/module/yiyiyuan/model/yi_favorite_contacts.py | 03a9050f0826ae4d25e32f4863e189e8bd71ffc5 | [] | no_license | P79N6A/project_code | d2a933d53deb0b4e0bcba97834de009e7bb78ad0 | 1b0e863ff3977471f5a94ef7d990796a9e9669c4 | refs/heads/master | 2020-04-16T02:06:57.317540 | 2019-01-11T07:02:05 | 2019-01-11T07:02:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,048 | py | # -*- coding: utf-8 -*-
# sqlacodegen mysql://root:123!@#@127.0.0.1/xhh_test --outfile yyy.py --flask
from lib.application import db
from .base_model import BaseModel
from .yi_user import YiUser
class YiFavoriteContact(db.Model, BaseModel):
__bind_key__ = 'xhh_yiyiyuan'
__tablename__ = 'yi_favorite_contacts'
id = db.Column(db.BigInteger, primary_key=True)
user_id = db.Column(db.BigInteger, nullable=False)
contacts_name = db.Column(db.String(20), nullable=False)
mobile = db.Column(db.String(20), nullable=False)
relatives_name = db.Column(db.String(20), nullable=False)
phone = db.Column(db.String(20), nullable=False)
last_modify_time = db.Column(db.DateTime, nullable=False)
create_time = db.Column(db.DateTime)
def getByUserId(self, user_id):
return self.query.filter_by(user_id=user_id).limit(1).first()
def contactDue(self, dbContact):
        # check whether the relative / frequent contacts are overdue
if dbContact is None:
return {}
mobiles = []
        # relative contact
if 'phone' in dbContact.keys():
mobiles.append(str(dbContact['phone']))
        # frequently-used contact
if 'mobile' in dbContact.keys():
mobiles.append(str(dbContact['mobile']))
if len(mobiles) == 0:
return {}
oUser = YiUser()
overdue_users = oUser.isOverdueMobile(mobiles)
overdue_mobiles = [user[0] for user in overdue_users]
        # determine whether each contact is overdue
contract_due_data = {}
if 'phone' in dbContact.keys() and dbContact['phone'] is not None:
if dbContact['phone'] in overdue_mobiles:
contract_due_data['com_r_overdue'] = 1
else:
contract_due_data['com_r_overdue'] = 0
if 'mobile' in dbContact.keys() and dbContact['mobile'] is not None:
if dbContact['mobile'] in overdue_mobiles:
contract_due_data['com_c_overdue'] = 1
else:
contract_due_data['com_c_overdue'] = 0
return contract_due_data
| [
"[email protected]"
] | |
1603f794730d82775b9c08df1c1c6289ae8cf270 | f68cd225b050d11616ad9542dda60288f6eeccff | /testscripts/RDKB/component/WIFIHAL/TS_WIFIHAL_5GHzIsRadioAutoBlockAckEnabled.py | 342c1a28d36b0e908ebc40dcbfc9e62a2bb7a193 | [
"Apache-2.0"
] | permissive | cablelabs/tools-tdkb | 18fb98fadcd169fa9000db8865285fbf6ff8dc9d | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | refs/heads/master | 2020-03-28T03:06:50.595160 | 2018-09-04T11:11:00 | 2018-09-05T00:24:38 | 147,621,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,337 | py | ##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2017 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>3</version>
<name>TS_WIFIHAL_5GHzIsRadioAutoBlockAckEnabled</name>
<primitive_test_id/>
<primitive_test_name>WIFIHAL_GetOrSetParamBoolValue</primitive_test_name>
<primitive_test_version>1</primitive_test_version>
<status>FREE</status>
<synopsis>Check AutoBlock-ACK enable status by using wifi_getRadioAutoBlockAckEnable HAL API</synopsis>
<groups_id>4</groups_id>
<execution_time>10</execution_time>
<long_duration>false</long_duration>
<advanced_script>false</advanced_script>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>Broadband</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_WIFIHAL_94</test_case_id>
<test_objective>Check AutoBlock-ACK enable status by using wifi_getRadioAutoBlockAckEnable HAL API</test_objective>
<test_type>Positive</test_type>
<test_setup>XB3. XB6</test_setup>
<pre_requisite>1.Ccsp Components should be in a running state else invoke cosa_start.sh manually that includes all the ccsp components and TDK Component
2.TDK Agent should be in running state or invoke it through StartTdk.sh script</pre_requisite>
<api_or_interface_used>wifi_getRadioAutoBlockAckEnable()</api_or_interface_used>
<input_parameters>methodName : getRadioAutoBlockAckEnable
methodName : setRadioAutoBlockAckEnable
radioIndex : 1</input_parameters>
<automation_approch>1.Configure the Function info in Test Manager GUI which needs to be tested
(WIFIHAL_GetOrSetParamBoolValue - func name - "If not exists already"
WIFIHAL - module name
Necessary I/P args as Mentioned in Input)
2.Python Script will be generated/overridden automatically by Test Manager with provided arguments in configure page (TS_WIFIHAL_5GHzIsRadioAutoBlockAckEnabled.py)
3.Execute the generated Script(TS_WIFIHAL_5GHzIsRadioAutoBlockAckEnabled.py) using execution page of Test Manager GUI
4.wifihalstub which is a part of TDK Agent process, will be in listening mode to execute TDK Component function named WIFIHAL_GetOrSetParamBoolValue through registered TDK wifihalstub function along with necessary arguments
5.WIFIHAL_GetOrSetParamBoolValue function will call Ccsp Base Function named "ssp_WIFIHALGetOrSetParamBoolValue", that inturn will call WIFIHAL Library Functions
wifi_getRadioAutoBlockAckEnable() and wifi_setRadioAutoBlockAckEnable()
6.Response(s)(printf) from TDK Component,Ccsp Library function and wifihalstub would be logged in Agent Console log based on the debug info redirected to agent console
7.wifihalstub will validate the available result (from agent console log and Pointer to instance as updated) with expected result
8.Test Manager will publish the result in GUI as SUCCESS/FAILURE based on the response from wifihalstub</automation_approch>
<except_output>CheckPoint 1: wifi_getRadioAutoBlockAckEnable log from DUT should be available in Agent Console Log
CheckPoint 2: TDK agent Test Function will log the test case result as PASS based on API response
CheckPoint 3: Test Manager GUI will publish the result as SUCCESS in Execution page</except_output>
<priority>High</priority>
<test_stub_interface>WIFIHAL</test_stub_interface>
<test_script>TS_WIFIHAL_5GHzIsRadioAutoBlockAckEnabled</test_script>
<skipped>No</skipped>
<release_version/>
<remarks/>
</test_cases>
<script_tags/>
</xml>
'''
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
from wifiUtility import *;
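#ExecuteWIFIHalCallMethod (imported from wifiUtility) is assumed here to
#return the tuple (tdkTestObj, actualresult, details); `details` carries the
#stub's "...:Enabled/Disabled" string that is parsed below.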
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("wifihal","1");
#IP and Port of box, No need to change,
#This will be replaced with the corresponding box IP and port while executing the script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_WIFIHAL_5GHzIsRadioAutoBlockAckEnabled');
loadmodulestatus =obj.getLoadModuleResult();
print "[LIB LOAD STATUS] : %s" %loadmodulestatus
if "SUCCESS" in loadmodulestatus.upper():
obj.setLoadModuleStatus("SUCCESS");
expectedresult="SUCCESS";
radioIndex = 1
getMethod = "getRadioAutoBlockAckEnable"
primitive = 'WIFIHAL_GetOrSetParamBoolValue'
#Getting the default enable mode
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, 0, getMethod)
if expectedresult in actualresult :
tdkTestObj.setResultStatus("SUCCESS");
enable = details.split(":")[1].strip()
if "Enabled" in enable:
print "Auto Block-Ack is Enabled for Radio 5GHz"
oldEnable = 1
newEnable = 0
else:
print "Auto Block-Ack is Disabled for Radio 5GHz "
oldEnable = 0
newEnable = 1
setMethod = "setRadioAutoBlockAckEnable"
#Toggle the enable status using set
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, newEnable, setMethod)
if expectedresult in actualresult :
print "Enable state toggled using set"
# Get the New enable status
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, 0, getMethod)
if expectedresult in actualresult and enable not in details.split(":")[1].strip():
print "getRadioAutoBlockAckEnable Success, verified Along with setRadioAutoBlockAckEnable() api"
#Revert back to original Enable status
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, oldEnable, setMethod)
if expectedresult in actualresult :
print "Enable status reverted back";
else:
print "Couldn't revert enable status"
tdkTestObj.setResultStatus("FAILURE");
else:
print "getRadioAutoBlockAckEnable() failed after set function"
tdkTestObj.setResultStatus("FAILURE");
else:
print "setRadioAutoBlockAckEnable() failed"
tdkTestObj.setResultStatus("FAILURE");
else:
print "getRadioAutoBlockAckEnable() failed"
tdkTestObj.setResultStatus("FAILURE");
obj.unloadModule("wifihal");
else:
print "Failed to load wifi module";
obj.setLoadModuleStatus("FAILURE");
| [
"[email protected]"
] |