| column | dtype | range / classes |
| --- | --- | --- |
| blob_id | string | lengths 40 to 40 |
| directory_id | string | lengths 40 to 40 |
| path | string | lengths 3 to 616 |
| content_id | string | lengths 40 to 40 |
| detected_licenses | sequence | lengths 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | lengths 5 to 115 |
| snapshot_id | string | lengths 40 to 40 |
| revision_id | string | lengths 40 to 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M, nullable |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | lengths 3 to 10.2M |
| authors | sequence | lengths 1 to 1 |
| author_id | string | lengths 1 to 132 |
1109161a39f73fe01e4a6f4099ad4dad4a0939bc | abdb582b9ab76eaf6df1fdb5843c24fa6fa1ede0 | /flendz_test/urls.py | 80bc3d35b33735c54f511c2ea63a1065e235799b | [] | no_license | jabykuniyil/flendz | 1375341ee97986842d962702e0f1ac7f6d48cae7 | ef952f9e14320b9c512b4047c6726ab9ff776120 | refs/heads/main | 2023-05-27T20:12:36.774259 | 2021-06-05T04:38:47 | 2021-06-05T04:38:47 | 372,798,247 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py |

from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('api/', include('test_app.url')),
]
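The URLconf above hands everything under `api/` to `test_app.url` (note the singular module name). A minimal sketch of what that included module might contain, assuming a plain function-based view; the view and route names are hypothetical, only the module path comes from the file itself:

```python
# Hypothetical test_app/url.py; only the module path is attested above.
from django.urls import path

from . import views  # assumed to exist alongside this module

urlpatterns = [
    path('', views.index, name='index'),  # placeholder view
]
```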
| ["[email protected]"] | |
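Each record in this dump follows the schema at the top. A minimal sketch of streaming such a dataset with the Hugging Face `datasets` library; the repository id below is a placeholder, not the dataset's actual name:

```python
from datasets import load_dataset

# "org/stack-like-corpus" is a placeholder id; substitute the real dataset name.
ds = load_dataset("org/stack-like-corpus", split="train", streaming=True)
for record in ds.take(1):
    print(record["path"], record["license_type"], record["length_bytes"])
```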
153bedd6b9dfdb762195c22a86d2e1d6dddd83c5 | ce083128fa87ca86c65059893aa8882d088461f5 | /python/flask-mail-labs/.venv/lib/python2.7/site-packages/babel/dates.py | a8fadc7801e088692658f8de8729ba4a76a892aa | [] | no_license | marcosptf/fedora | 581a446e7f81d8ae9a260eafb92814bc486ee077 | 359db63ff1fa79696b7bc803bcfa0042bff8ab44 | refs/heads/master | 2023-04-06T14:53:40.378260 | 2023-03-26T00:47:52 | 2023-03-26T00:47:52 | 26,059,824 | 6 | 5 | null | 2022-12-08T00:43:21 | 2014-11-01T18:48:56 | null | UTF-8 | Python | false | false | 67,706 | py | # -*- coding: utf-8 -*-
"""
babel.dates
~~~~~~~~~~~
Locale dependent formatting and parsing of dates and times.
The default locale for the functions in this module is determined by the
following environment variables, in that order:
* ``LC_TIME``,
* ``LC_ALL``, and
* ``LANG``
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
from __future__ import division
import re
import warnings
import pytz as _pytz
from datetime import date, datetime, time, timedelta
from bisect import bisect_right
from babel.core import default_locale, get_global, Locale
from babel.util import UTC, LOCALTZ
from babel._compat import string_types, integer_types, number_types
LC_TIME = default_locale('LC_TIME')
# Aliases for use in scopes where the modules are shadowed by local variables
date_ = date
datetime_ = datetime
time_ = time
def _get_dt_and_tzinfo(dt_or_tzinfo):
"""
Parse a `dt_or_tzinfo` value into a datetime and a tzinfo.
See the docs for this function's callers for semantics.
:rtype: tuple[datetime, tzinfo]
"""
if dt_or_tzinfo is None:
dt = datetime.now()
tzinfo = LOCALTZ
elif isinstance(dt_or_tzinfo, string_types):
dt = None
tzinfo = get_timezone(dt_or_tzinfo)
elif isinstance(dt_or_tzinfo, integer_types):
dt = None
tzinfo = UTC
elif isinstance(dt_or_tzinfo, (datetime, time)):
dt = _get_datetime(dt_or_tzinfo)
if dt.tzinfo is not None:
tzinfo = dt.tzinfo
else:
tzinfo = UTC
else:
dt = None
tzinfo = dt_or_tzinfo
return dt, tzinfo
def _get_datetime(instant):
"""
Get a datetime out of an "instant" (date, time, datetime, number).
.. warning:: The return values of this function may depend on the system clock.
If the instant is None, the current moment is used.
If the instant is a time, it's augmented with today's date.
Dates are converted to naive datetimes with midnight as the time component.
>>> _get_datetime(date(2015, 1, 1))
datetime.datetime(2015, 1, 1, 0, 0)
UNIX timestamps are converted to datetimes.
>>> _get_datetime(1400000000)
datetime.datetime(2014, 5, 13, 16, 53, 20)
Other values are passed through as-is.
>>> x = datetime(2015, 1, 1)
>>> _get_datetime(x) is x
True
:param instant: date, time, datetime, integer, float or None
:type instant: date|time|datetime|int|float|None
:return: a datetime
:rtype: datetime
"""
if instant is None:
return datetime_.utcnow()
elif isinstance(instant, integer_types) or isinstance(instant, float):
return datetime_.utcfromtimestamp(instant)
elif isinstance(instant, time):
return datetime_.combine(date.today(), instant)
elif isinstance(instant, date) and not isinstance(instant, datetime):
return datetime_.combine(instant, time())
# TODO (3.x): Add an assertion/type check for this fallthrough branch:
return instant
def _ensure_datetime_tzinfo(datetime, tzinfo=None):
"""
Ensure the datetime passed has an attached tzinfo.
If the datetime is tz-naive to begin with, UTC is attached.
If a tzinfo is passed in, the datetime is normalized to that timezone.
>>> _ensure_datetime_tzinfo(datetime(2015, 1, 1)).tzinfo.zone
'UTC'
>>> tz = get_timezone("Europe/Stockholm")
>>> _ensure_datetime_tzinfo(datetime(2015, 1, 1, 13, 15, tzinfo=UTC), tzinfo=tz).hour
14
:param datetime: Datetime to augment.
:param tzinfo: Optional tzinfo.
:return: datetime with tzinfo
:rtype: datetime
"""
if datetime.tzinfo is None:
datetime = datetime.replace(tzinfo=UTC)
if tzinfo is not None:
datetime = datetime.astimezone(get_timezone(tzinfo))
if hasattr(tzinfo, 'normalize'): # pytz
datetime = tzinfo.normalize(datetime)
return datetime
def _get_time(time, tzinfo=None):
"""
Get a timezoned time from a given instant.
.. warning:: The return values of this function may depend on the system clock.
:param time: time, datetime or None
:rtype: time
"""
if time is None:
time = datetime.utcnow()
elif isinstance(time, number_types):
time = datetime.utcfromtimestamp(time)
if time.tzinfo is None:
time = time.replace(tzinfo=UTC)
if isinstance(time, datetime):
if tzinfo is not None:
time = time.astimezone(tzinfo)
if hasattr(tzinfo, 'normalize'): # pytz
time = tzinfo.normalize(time)
time = time.timetz()
elif tzinfo is not None:
time = time.replace(tzinfo=tzinfo)
return time
def get_timezone(zone=None):
"""Looks up a timezone by name and returns it. The timezone object
returned comes from ``pytz`` and corresponds to the `tzinfo` interface and
can be used with all of the functions of Babel that operate with dates.
If a timezone is not known a :exc:`LookupError` is raised. If `zone`
is ``None`` a local zone object is returned.
:param zone: the name of the timezone to look up. If a timezone object
itself is passed in, it's returned unchanged.
"""
if zone is None:
return LOCALTZ
if not isinstance(zone, string_types):
return zone
try:
return _pytz.timezone(zone)
except _pytz.UnknownTimeZoneError:
raise LookupError('Unknown timezone %s' % zone)
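A short usage sketch for `get_timezone` (requires `pytz` to be installed):

```python
from babel.dates import get_timezone

tz = get_timezone('Europe/Berlin')   # pytz tzinfo object
assert get_timezone(tz) is tz        # tzinfo objects pass through unchanged
try:
    get_timezone('Not/AZone')
except LookupError as exc:
    print(exc)                       # Unknown timezone Not/AZone
```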
def get_next_timezone_transition(zone=None, dt=None):
"""Given a timezone it will return a :class:`TimezoneTransition` object
that holds the information about the next timezone transition that's going
to happen. For instance this can be used to detect when the next DST
change is going to happen and what it looks like.
The transition is calculated relative to the given datetime object. The
next transition that follows the date is used. If a transition cannot
be found the return value will be `None`.
Transition information can only be provided for timezones returned by
the :func:`get_timezone` function.
:param zone: the timezone for which the transition should be looked up.
If not provided the local timezone is used.
:param dt: the date after which the next transition should be found.
If not given the current time is assumed.
"""
zone = get_timezone(zone)
dt = _get_datetime(dt).replace(tzinfo=None)
if not hasattr(zone, '_utc_transition_times'):
raise TypeError('Given timezone does not have UTC transition '
'times. This can happen because the operating '
'system fallback local timezone is used or a '
'custom timezone object')
try:
idx = max(0, bisect_right(zone._utc_transition_times, dt))
old_trans = zone._transition_info[idx - 1]
new_trans = zone._transition_info[idx]
old_tz = zone._tzinfos[old_trans]
new_tz = zone._tzinfos[new_trans]
except (LookupError, ValueError):
return None
return TimezoneTransition(
activates=zone._utc_transition_times[idx],
from_tzinfo=old_tz,
to_tzinfo=new_tz,
reference_date=dt
)
class TimezoneTransition(object):
"""A helper object that represents the return value from
:func:`get_next_timezone_transition`.
"""
def __init__(self, activates, from_tzinfo, to_tzinfo, reference_date=None):
#: the time of the activation of the timezone transition in UTC.
self.activates = activates
#: the timezone from where the transition starts.
self.from_tzinfo = from_tzinfo
#: the timezone for after the transition.
self.to_tzinfo = to_tzinfo
#: the reference date that was provided. This is the `dt` parameter
#: to the :func:`get_next_timezone_transition`.
self.reference_date = reference_date
@property
def from_tz(self):
"""The name of the timezone before the transition."""
return self.from_tzinfo._tzname
@property
def to_tz(self):
"""The name of the timezone after the transition."""
return self.to_tzinfo._tzname
@property
def from_offset(self):
"""The UTC offset in seconds before the transition."""
return int(self.from_tzinfo._utcoffset.total_seconds())
@property
def to_offset(self):
"""The UTC offset in seconds after the transition."""
return int(self.to_tzinfo._utcoffset.total_seconds())
def __repr__(self):
return '<TimezoneTransition %s -> %s (%s)>' % (
self.from_tz,
self.to_tz,
self.activates,
)
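A sketch of inspecting the next DST change with the helpers above; the concrete values depend on the zone data shipped with `pytz`:

```python
from datetime import datetime
from babel.dates import get_timezone, get_next_timezone_transition

trans = get_next_timezone_transition(get_timezone('Europe/Berlin'),
                                     datetime(2021, 1, 1))
if trans is not None:                            # None if nothing is recorded
    print(trans.from_tz, '->', trans.to_tz)      # e.g. CET -> CEST
    print(trans.from_offset, trans.to_offset)    # offsets in seconds
    print(trans.activates)                       # transition instant in UTC
```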
def get_period_names(width='wide', context='stand-alone', locale=LC_TIME):
"""Return the names for day periods (AM/PM) used by the locale.
>>> get_period_names(locale='en_US')['am']
u'AM'
:param width: the width to use, one of "abbreviated", "narrow", or "wide"
:param context: the context, either "format" or "stand-alone"
:param locale: the `Locale` object, or a locale string
"""
return Locale.parse(locale).day_periods[context][width]
def get_day_names(width='wide', context='format', locale=LC_TIME):
"""Return the day names used by the locale for the specified format.
>>> get_day_names('wide', locale='en_US')[1]
u'Tuesday'
>>> get_day_names('short', locale='en_US')[1]
u'Tu'
>>> get_day_names('abbreviated', locale='es')[1]
u'mar.'
>>> get_day_names('narrow', context='stand-alone', locale='de_DE')[1]
u'D'
:param width: the width to use, one of "wide", "abbreviated", "short" or "narrow"
:param context: the context, either "format" or "stand-alone"
:param locale: the `Locale` object, or a locale string
"""
return Locale.parse(locale).days[context][width]
def get_month_names(width='wide', context='format', locale=LC_TIME):
"""Return the month names used by the locale for the specified format.
>>> get_month_names('wide', locale='en_US')[1]
u'January'
>>> get_month_names('abbreviated', locale='es')[1]
u'ene.'
>>> get_month_names('narrow', context='stand-alone', locale='de_DE')[1]
u'J'
:param width: the width to use, one of "wide", "abbreviated", or "narrow"
:param context: the context, either "format" or "stand-alone"
:param locale: the `Locale` object, or a locale string
"""
return Locale.parse(locale).months[context][width]
def get_quarter_names(width='wide', context='format', locale=LC_TIME):
"""Return the quarter names used by the locale for the specified format.
>>> get_quarter_names('wide', locale='en_US')[1]
u'1st quarter'
>>> get_quarter_names('abbreviated', locale='de_DE')[1]
u'Q1'
>>> get_quarter_names('narrow', locale='de_DE')[1]
u'1'
:param width: the width to use, one of "wide", "abbreviated", or "narrow"
:param context: the context, either "format" or "stand-alone"
:param locale: the `Locale` object, or a locale string
"""
return Locale.parse(locale).quarters[context][width]
def get_era_names(width='wide', locale=LC_TIME):
"""Return the era names used by the locale for the specified format.
>>> get_era_names('wide', locale='en_US')[1]
u'Anno Domini'
>>> get_era_names('abbreviated', locale='de_DE')[1]
u'n. Chr.'
:param width: the width to use, either "wide", "abbreviated", or "narrow"
:param locale: the `Locale` object, or a locale string
"""
return Locale.parse(locale).eras[width]
def get_date_format(format='medium', locale=LC_TIME):
"""Return the date formatting patterns used by the locale for the specified
format.
>>> get_date_format(locale='en_US')
<DateTimePattern u'MMM d, y'>
>>> get_date_format('full', locale='de_DE')
<DateTimePattern u'EEEE, d. MMMM y'>
:param format: the format to use, one of "full", "long", "medium", or
"short"
:param locale: the `Locale` object, or a locale string
"""
return Locale.parse(locale).date_formats[format]
def get_datetime_format(format='medium', locale=LC_TIME):
"""Return the datetime formatting patterns used by the locale for the
specified format.
>>> get_datetime_format(locale='en_US')
u'{1}, {0}'
:param format: the format to use, one of "full", "long", "medium", or
"short"
:param locale: the `Locale` object, or a locale string
"""
patterns = Locale.parse(locale).datetime_formats
if format not in patterns:
format = None
return patterns[format]
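The `{1}, {0}` wrapper returned here is what `format_datetime` (below) uses to combine the formatted date (`{1}`) and time (`{0}`); a hand-rolled sketch of that composition:

```python
from datetime import datetime
from babel.dates import format_date, format_time, get_datetime_format

dt = datetime(2007, 4, 1, 15, 30)
wrapper = get_datetime_format('medium', locale='en_US')      # u'{1}, {0}'
combined = (wrapper
            .replace('{1}', format_date(dt, 'medium', locale='en_US'))
            .replace('{0}', format_time(dt, 'medium', locale='en_US')))
print(combined)   # Apr 1, 2007, 3:30:00 PM, same as format_datetime(dt)
```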
def get_time_format(format='medium', locale=LC_TIME):
"""Return the time formatting patterns used by the locale for the specified
format.
>>> get_time_format(locale='en_US')
<DateTimePattern u'h:mm:ss a'>
>>> get_time_format('full', locale='de_DE')
<DateTimePattern u'HH:mm:ss zzzz'>
:param format: the format to use, one of "full", "long", "medium", or
"short"
:param locale: the `Locale` object, or a locale string
"""
return Locale.parse(locale).time_formats[format]
def get_timezone_gmt(datetime=None, width='long', locale=LC_TIME, return_z=False):
"""Return the timezone associated with the given `datetime` object formatted
as string indicating the offset from GMT.
>>> dt = datetime(2007, 4, 1, 15, 30)
>>> get_timezone_gmt(dt, locale='en')
u'GMT+00:00'
>>> get_timezone_gmt(dt, locale='en', return_z=True)
'Z'
>>> get_timezone_gmt(dt, locale='en', width='iso8601_short')
u'+00'
>>> tz = get_timezone('America/Los_Angeles')
>>> dt = tz.localize(datetime(2007, 4, 1, 15, 30))
>>> get_timezone_gmt(dt, locale='en')
u'GMT-07:00'
>>> get_timezone_gmt(dt, 'short', locale='en')
u'-0700'
>>> get_timezone_gmt(dt, locale='en', width='iso8601_short')
u'-07'
The long format depends on the locale; for example, in France the acronym
UTC is used instead of GMT:
>>> get_timezone_gmt(dt, 'long', locale='fr_FR')
u'UTC-07:00'
.. versionadded:: 0.9
:param datetime: the ``datetime`` object; if `None`, the current date and
time in UTC is used
:param width: either "long" or "short" or "iso8601" or "iso8601_short"
:param locale: the `Locale` object, or a locale string
:param return_z: if `True`, return the indicator "Z" when the local time
offset is 0
"""
datetime = _ensure_datetime_tzinfo(_get_datetime(datetime))
locale = Locale.parse(locale)
offset = datetime.tzinfo.utcoffset(datetime)
seconds = offset.days * 24 * 60 * 60 + offset.seconds
hours, seconds = divmod(seconds, 3600)
if return_z and hours == 0 and seconds == 0:
return 'Z'
elif seconds == 0 and width == 'iso8601_short':
return u'%+03d' % hours
elif width == 'short' or width == 'iso8601_short':
pattern = u'%+03d%02d'
elif width == 'iso8601':
pattern = u'%+03d:%02d'
else:
pattern = locale.zone_formats['gmt'] % '%+03d:%02d'
return pattern % (hours, seconds // 60)
def get_timezone_location(dt_or_tzinfo=None, locale=LC_TIME, return_city=False):
u"""Return a representation of the given timezone using "location format".
The result depends on both the local display name of the country and the
city associated with the time zone:
>>> tz = get_timezone('America/St_Johns')
>>> print(get_timezone_location(tz, locale='de_DE'))
Kanada (St. John’s) Zeit
>>> print(get_timezone_location(tz, locale='en'))
Canada (St. John’s) Time
>>> print(get_timezone_location(tz, locale='en', return_city=True))
St. John’s
>>> tz = get_timezone('America/Mexico_City')
>>> get_timezone_location(tz, locale='de_DE')
u'Mexiko (Mexiko-Stadt) Zeit'
If the timezone is associated with a country that uses only a single
timezone, just the localized country name is returned:
>>> tz = get_timezone('Europe/Berlin')
>>> get_timezone_name(tz, locale='de_DE')
u'Mitteleurop\\xe4ische Zeit'
.. versionadded:: 0.9
:param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines
the timezone; if `None`, the current date and time in
UTC is assumed
:param locale: the `Locale` object, or a locale string
:param return_city: if `True`, return the exemplar city (location) for the
time zone
:return: the localized timezone name using location format
"""
dt, tzinfo = _get_dt_and_tzinfo(dt_or_tzinfo)
locale = Locale.parse(locale)
if hasattr(tzinfo, 'zone'):
zone = tzinfo.zone
else:
zone = tzinfo.tzname(dt or datetime.utcnow())
# Get the canonical time-zone code
zone = get_global('zone_aliases').get(zone, zone)
info = locale.time_zones.get(zone, {})
# Otherwise, if there is only one timezone for the country, return the
# localized country name
region_format = locale.zone_formats['region']
territory = get_global('zone_territories').get(zone)
if territory not in locale.territories:
territory = 'ZZ' # invalid/unknown
territory_name = locale.territories[territory]
if not return_city and territory and len(get_global('territory_zones').get(territory, [])) == 1:
return region_format % (territory_name)
# Otherwise, include the city in the output
fallback_format = locale.zone_formats['fallback']
if 'city' in info:
city_name = info['city']
else:
metazone = get_global('meta_zones').get(zone)
metazone_info = locale.meta_zones.get(metazone, {})
if 'city' in metazone_info:
city_name = metazone_info['city']
elif '/' in zone:
city_name = zone.split('/', 1)[1].replace('_', ' ')
else:
city_name = zone.replace('_', ' ')
if return_city:
return city_name
return region_format % (fallback_format % {
'0': city_name,
'1': territory_name
})
def get_timezone_name(dt_or_tzinfo=None, width='long', uncommon=False,
locale=LC_TIME, zone_variant=None, return_zone=False):
r"""Return the localized display name for the given timezone. The timezone
may be specified using a ``datetime`` or `tzinfo` object.
>>> dt = time(15, 30, tzinfo=get_timezone('America/Los_Angeles'))
>>> get_timezone_name(dt, locale='en_US')
u'Pacific Standard Time'
>>> get_timezone_name(dt, locale='en_US', return_zone=True)
'America/Los_Angeles'
>>> get_timezone_name(dt, width='short', locale='en_US')
u'PST'
If this function gets passed only a `tzinfo` object and no concrete
`datetime`, the returned display name is independent of daylight savings
time. This can be used for example for selecting timezones, or to set the
time of events that recur across DST changes:
>>> tz = get_timezone('America/Los_Angeles')
>>> get_timezone_name(tz, locale='en_US')
u'Pacific Time'
>>> get_timezone_name(tz, 'short', locale='en_US')
u'PT'
If no localized display name for the timezone is available, and the timezone
is associated with a country that uses only a single timezone, the name of
that country is returned, formatted according to the locale:
>>> tz = get_timezone('Europe/Berlin')
>>> get_timezone_name(tz, locale='de_DE')
u'Mitteleurop\xe4ische Zeit'
>>> get_timezone_name(tz, locale='pt_BR')
u'Hor\xe1rio da Europa Central'
On the other hand, if the country uses multiple timezones, the city is also
included in the representation:
>>> tz = get_timezone('America/St_Johns')
>>> get_timezone_name(tz, locale='de_DE')
u'Neufundland-Zeit'
Note that short format is currently not supported for all timezones and
all locales. This is partially because not every timezone has a short
code in every locale. In that case it currently falls back to the long
format.
For more information see `LDML Appendix J: Time Zone Display Names
<http://www.unicode.org/reports/tr35/#Time_Zone_Fallback>`_
.. versionadded:: 0.9
.. versionchanged:: 1.0
Added `zone_variant` support.
:param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines
the timezone; if a ``tzinfo`` object is used, the
resulting display name will be generic, i.e.
independent of daylight savings time; if `None`, the
current date in UTC is assumed
:param width: either "long" or "short"
:param uncommon: deprecated and ignored
:param zone_variant: defines the zone variation to return. By default the
variation is defined from the datetime object
passed in. If no datetime object is passed in, the
``'generic'`` variation is assumed. The following
values are valid: ``'generic'``, ``'daylight'`` and
``'standard'``.
:param locale: the `Locale` object, or a locale string
:param return_zone: if `True`, return the long time zone ID instead of
the display name
"""
dt, tzinfo = _get_dt_and_tzinfo(dt_or_tzinfo)
locale = Locale.parse(locale)
if hasattr(tzinfo, 'zone'):
zone = tzinfo.zone
else:
zone = tzinfo.tzname(dt)
if zone_variant is None:
if dt is None:
zone_variant = 'generic'
else:
dst = tzinfo.dst(dt)
if dst:
zone_variant = 'daylight'
else:
zone_variant = 'standard'
else:
if zone_variant not in ('generic', 'standard', 'daylight'):
raise ValueError('Invalid zone variation')
# Get the canonical time-zone code
zone = get_global('zone_aliases').get(zone, zone)
if return_zone:
return zone
info = locale.time_zones.get(zone, {})
# Try explicitly translated zone names first
if width in info:
if zone_variant in info[width]:
return info[width][zone_variant]
metazone = get_global('meta_zones').get(zone)
if metazone:
metazone_info = locale.meta_zones.get(metazone, {})
if width in metazone_info:
if zone_variant in metazone_info[width]:
return metazone_info[width][zone_variant]
# If we have a concrete datetime, we assume that the result can't be
# independent of daylight savings time, so we return the GMT offset
if dt is not None:
return get_timezone_gmt(dt, width=width, locale=locale)
return get_timezone_location(dt_or_tzinfo, locale=locale)
def format_date(date=None, format='medium', locale=LC_TIME):
"""Return a date formatted according to the given pattern.
>>> d = date(2007, 4, 1)
>>> format_date(d, locale='en_US')
u'Apr 1, 2007'
>>> format_date(d, format='full', locale='de_DE')
u'Sonntag, 1. April 2007'
If you don't want to use the locale default formats, you can specify a
custom date pattern:
>>> format_date(d, "EEE, MMM d, ''yy", locale='en')
u"Sun, Apr 1, '07"
:param date: the ``date`` or ``datetime`` object; if `None`, the current
date is used
:param format: one of "full", "long", "medium", or "short", or a custom
date/time pattern
:param locale: a `Locale` object or a locale identifier
"""
if date is None:
date = date_.today()
elif isinstance(date, datetime):
date = date.date()
locale = Locale.parse(locale)
if format in ('full', 'long', 'medium', 'short'):
format = get_date_format(format, locale=locale)
pattern = parse_pattern(format)
return pattern.apply(date, locale)
def format_datetime(datetime=None, format='medium', tzinfo=None,
locale=LC_TIME):
r"""Return a date formatted according to the given pattern.
>>> dt = datetime(2007, 4, 1, 15, 30)
>>> format_datetime(dt, locale='en_US')
u'Apr 1, 2007, 3:30:00 PM'
For any pattern requiring the display of the time-zone, the third-party
``pytz`` package is needed to explicitly specify the time-zone:
>>> format_datetime(dt, 'full', tzinfo=get_timezone('Europe/Paris'),
... locale='fr_FR')
u'dimanche 1 avril 2007 \xe0 17:30:00 heure d\u2019\xe9t\xe9 d\u2019Europe centrale'
>>> format_datetime(dt, "yyyy.MM.dd G 'at' HH:mm:ss zzz",
... tzinfo=get_timezone('US/Eastern'), locale='en')
u'2007.04.01 AD at 11:30:00 EDT'
:param datetime: the `datetime` object; if `None`, the current date and
time is used
:param format: one of "full", "long", "medium", or "short", or a custom
date/time pattern
:param tzinfo: the timezone to apply to the time for display
:param locale: a `Locale` object or a locale identifier
"""
datetime = _ensure_datetime_tzinfo(_get_datetime(datetime), tzinfo)
locale = Locale.parse(locale)
if format in ('full', 'long', 'medium', 'short'):
return get_datetime_format(format, locale=locale) \
.replace("'", "") \
.replace('{0}', format_time(datetime, format, tzinfo=None,
locale=locale)) \
.replace('{1}', format_date(datetime, format, locale=locale))
else:
return parse_pattern(format).apply(datetime, locale)
def format_time(time=None, format='medium', tzinfo=None, locale=LC_TIME):
r"""Return a time formatted according to the given pattern.
>>> t = time(15, 30)
>>> format_time(t, locale='en_US')
u'3:30:00 PM'
>>> format_time(t, format='short', locale='de_DE')
u'15:30'
If you don't want to use the locale default formats, you can specify a
custom time pattern:
>>> format_time(t, "hh 'o''clock' a", locale='en')
u"03 o'clock PM"
For any pattern requiring the display of the time-zone a
timezone has to be specified explicitly:
>>> t = datetime(2007, 4, 1, 15, 30)
>>> tzinfo = get_timezone('Europe/Paris')
>>> t = tzinfo.localize(t)
>>> format_time(t, format='full', tzinfo=tzinfo, locale='fr_FR')
u'15:30:00 heure d\u2019\xe9t\xe9 d\u2019Europe centrale'
>>> format_time(t, "hh 'o''clock' a, zzzz", tzinfo=get_timezone('US/Eastern'),
... locale='en')
u"09 o'clock AM, Eastern Daylight Time"
As that example shows, when this function gets passed a
``datetime.datetime`` value, the actual time in the formatted string is
adjusted to the timezone specified by the `tzinfo` parameter. If the
``datetime`` is "naive" (i.e. it has no associated timezone information),
it is assumed to be in UTC.
These timezone calculations are **not** performed if the value is of type
``datetime.time``, as without date information there's no way to determine
what a given time would translate to in a different timezone without
information about whether daylight savings time is in effect or not. This
means that time values are left as-is, and the value of the `tzinfo`
parameter is only used to display the timezone name if needed:
>>> t = time(15, 30)
>>> format_time(t, format='full', tzinfo=get_timezone('Europe/Paris'),
... locale='fr_FR')
u'15:30:00 heure normale d\u2019Europe centrale'
>>> format_time(t, format='full', tzinfo=get_timezone('US/Eastern'),
... locale='en_US')
u'3:30:00 PM Eastern Standard Time'
:param time: the ``time`` or ``datetime`` object; if `None`, the current
time in UTC is used
:param format: one of "full", "long", "medium", or "short", or a custom
date/time pattern
:param tzinfo: the time-zone to apply to the time for display
:param locale: a `Locale` object or a locale identifier
"""
time = _get_time(time, tzinfo)
locale = Locale.parse(locale)
if format in ('full', 'long', 'medium', 'short'):
format = get_time_format(format, locale=locale)
return parse_pattern(format).apply(time, locale)
def format_skeleton(skeleton, datetime=None, tzinfo=None, fuzzy=True, locale=LC_TIME):
r"""Return a time and/or date formatted according to the given pattern.
The skeletons are defined in the CLDR data and provide more flexibility
than the simple short/long/medium formats, but are a bit harder to use.
They are defined using the date/time symbols without order or punctuation
and map to a suitable format for the given locale.
>>> t = datetime(2007, 4, 1, 15, 30)
>>> format_skeleton('MMMEd', t, locale='fr')
u'dim. 1 avr.'
>>> format_skeleton('MMMEd', t, locale='en')
u'Sun, Apr 1'
>>> format_skeleton('yMMd', t, locale='fi') # yMMd is not in the Finnish locale; yMd gets used
u'1.4.2007'
>>> format_skeleton('yMMd', t, fuzzy=False, locale='fi') # yMMd is not in the Finnish locale, an error is thrown
Traceback (most recent call last):
...
KeyError: yMMd
After the skeleton is resolved to a pattern `format_datetime` is called so
all timezone processing etc. is the same as for that.
:param skeleton: A date time skeleton as defined in the cldr data.
:param datetime: the ``time`` or ``datetime`` object; if `None`, the current
time in UTC is used
:param tzinfo: the time-zone to apply to the time for display
:param fuzzy: If the skeleton is not found, allow choosing a skeleton that's
close enough to it.
:param locale: a `Locale` object or a locale identifier
"""
locale = Locale.parse(locale)
if fuzzy and skeleton not in locale.datetime_skeletons:
skeleton = match_skeleton(skeleton, locale.datetime_skeletons)
format = locale.datetime_skeletons[skeleton]
return format_datetime(datetime, format, tzinfo, locale)
TIMEDELTA_UNITS = (
('year', 3600 * 24 * 365),
('month', 3600 * 24 * 30),
('week', 3600 * 24 * 7),
('day', 3600 * 24),
('hour', 3600),
('minute', 60),
('second', 1)
)
def format_timedelta(delta, granularity='second', threshold=.85,
add_direction=False, format='long',
locale=LC_TIME):
"""Return a time delta according to the rules of the given locale.
>>> format_timedelta(timedelta(weeks=12), locale='en_US')
u'3 months'
>>> format_timedelta(timedelta(seconds=1), locale='es')
u'1 segundo'
The granularity parameter can be provided to alter the lowest unit
presented, which defaults to a second.
>>> format_timedelta(timedelta(hours=3), granularity='day',
... locale='en_US')
u'1 day'
The threshold parameter can be used to determine at which value the
presentation switches to the next higher unit. A higher threshold factor
means the presentation will switch later. For example:
>>> format_timedelta(timedelta(hours=23), threshold=0.9, locale='en_US')
u'1 day'
>>> format_timedelta(timedelta(hours=23), threshold=1.1, locale='en_US')
u'23 hours'
In addition directional information can be provided that informs
the user if the date is in the past or in the future:
>>> format_timedelta(timedelta(hours=1), add_direction=True, locale='en')
u'in 1 hour'
>>> format_timedelta(timedelta(hours=-1), add_direction=True, locale='en')
u'1 hour ago'
The format parameter controls how compact or wide the presentation is:
>>> format_timedelta(timedelta(hours=3), format='short', locale='en')
u'3 hr'
>>> format_timedelta(timedelta(hours=3), format='narrow', locale='en')
u'3h'
:param delta: a ``timedelta`` object representing the time difference to
format, or the delta in seconds as an `int` value
:param granularity: determines the smallest unit that should be displayed,
the value can be one of "year", "month", "week", "day",
"hour", "minute" or "second"
:param threshold: factor that determines at which point the presentation
switches to the next higher unit
:param add_direction: if this flag is set to `True` the return value will
include directional information. For instance a
positive timedelta will include the information about
it being in the future, a negative will be information
about the value being in the past.
:param format: the format, can be "narrow", "short" or "long". (
"medium" is deprecated, currently converted to "long" to
maintain compatibility)
:param locale: a `Locale` object or a locale identifier
"""
if format not in ('narrow', 'short', 'medium', 'long'):
raise TypeError('Format must be one of "narrow", "short" or "long"')
if format == 'medium':
warnings.warn('"medium" value for format param of format_timedelta'
' is deprecated. Use "long" instead',
category=DeprecationWarning)
format = 'long'
if isinstance(delta, timedelta):
seconds = int((delta.days * 86400) + delta.seconds)
else:
seconds = delta
locale = Locale.parse(locale)
def _iter_patterns(a_unit):
if add_direction:
unit_rel_patterns = locale._data['date_fields'][a_unit]
if seconds >= 0:
yield unit_rel_patterns['future']
else:
yield unit_rel_patterns['past']
a_unit = 'duration-' + a_unit
yield locale._data['unit_patterns'].get(a_unit, {}).get(format)
for unit, secs_per_unit in TIMEDELTA_UNITS:
value = abs(seconds) / secs_per_unit
if value >= threshold or unit == granularity:
if unit == granularity and value > 0:
value = max(1, value)
value = int(round(value))
plural_form = locale.plural_form(value)
pattern = None
for patterns in _iter_patterns(unit):
if patterns is not None:
pattern = patterns[plural_form]
break
# This really should not happen
if pattern is None:
return u''
return pattern.replace('{0}', str(value))
return u''
def _format_fallback_interval(start, end, skeleton, tzinfo, locale):
if skeleton in locale.datetime_skeletons: # Use the given skeleton
format = lambda dt: format_skeleton(skeleton, dt, tzinfo, locale=locale)
elif all((isinstance(d, date) and not isinstance(d, datetime)) for d in (start, end)): # Both are just dates
format = lambda dt: format_date(dt, locale=locale)
elif all((isinstance(d, time) and not isinstance(d, date)) for d in (start, end)): # Both are times
format = lambda dt: format_time(dt, tzinfo=tzinfo, locale=locale)
else:
format = lambda dt: format_datetime(dt, tzinfo=tzinfo, locale=locale)
formatted_start = format(start)
formatted_end = format(end)
if formatted_start == formatted_end:
return format(start)
return (
locale.interval_formats.get(None, "{0}-{1}").
replace("{0}", formatted_start).
replace("{1}", formatted_end)
)
def format_interval(start, end, skeleton=None, tzinfo=None, fuzzy=True, locale=LC_TIME):
"""
Format an interval between two instants according to the locale's rules.
>>> format_interval(date(2016, 1, 15), date(2016, 1, 17), "yMd", locale="fi")
u'15.\u201317.1.2016'
>>> format_interval(time(12, 12), time(16, 16), "Hm", locale="en_GB")
'12:12 \u2013 16:16'
>>> format_interval(time(5, 12), time(16, 16), "hm", locale="en_US")
'5:12 AM \u2013 4:16 PM'
>>> format_interval(time(16, 18), time(16, 24), "Hm", locale="it")
'16:18\u201316:24'
If the start instant equals the end instant, the interval is formatted like the instant.
>>> format_interval(time(16, 18), time(16, 18), "Hm", locale="it")
'16:18'
Unknown skeletons fall back to "default" formatting.
>>> format_interval(date(2015, 1, 1), date(2017, 1, 1), "wzq", locale="ja")
'2015/01/01\uff5e2017/01/01'
>>> format_interval(time(16, 18), time(16, 24), "xxx", locale="ja")
'16:18:00\uff5e16:24:00'
>>> format_interval(date(2016, 1, 15), date(2016, 1, 17), "xxx", locale="de")
'15.01.2016 \u2013 17.01.2016'
:param start: First instant (datetime/date/time)
:param end: Second instant (datetime/date/time)
:param skeleton: The "skeleton format" to use for formatting.
:param tzinfo: tzinfo to use (if none is already attached)
:param fuzzy: If the skeleton is not found, allow choosing a skeleton that's
close enough to it.
:param locale: A locale object or identifier.
:return: Formatted interval
"""
locale = Locale.parse(locale)
# NB: The quote comments below are from the algorithm description in
# http://www.unicode.org/reports/tr35/tr35-dates.html#intervalFormats
# > Look for the intervalFormatItem element that matches the "skeleton",
# > starting in the current locale and then following the locale fallback
# > chain up to, but not including root.
interval_formats = locale.interval_formats
if skeleton not in interval_formats or not skeleton:
# > If no match was found from the previous step, check what the closest
# > match is in the fallback locale chain, as in availableFormats. That
# > is, this allows for adjusting the string value field's width,
# > including adjusting between "MMM" and "MMMM", and using different
# > variants of the same field, such as 'v' and 'z'.
if skeleton and fuzzy:
skeleton = match_skeleton(skeleton, interval_formats)
else:
skeleton = None
if not skeleton: # Still no match whatsoever?
# > Otherwise, format the start and end datetime using the fallback pattern.
return _format_fallback_interval(start, end, skeleton, tzinfo, locale)
skel_formats = interval_formats[skeleton]
if start == end:
return format_skeleton(skeleton, start, tzinfo, fuzzy=fuzzy, locale=locale)
start = _ensure_datetime_tzinfo(_get_datetime(start), tzinfo=tzinfo)
end = _ensure_datetime_tzinfo(_get_datetime(end), tzinfo=tzinfo)
start_fmt = DateTimeFormat(start, locale=locale)
end_fmt = DateTimeFormat(end, locale=locale)
# > If a match is found from previous steps, compute the calendar field
# > with the greatest difference between start and end datetime. If there
# > is no difference among any of the fields in the pattern, format as a
# > single date using availableFormats, and return.
for field in PATTERN_CHAR_ORDER: # These are in largest-to-smallest order
if field in skel_formats:
if start_fmt.extract(field) != end_fmt.extract(field):
# > If there is a match, use the pieces of the corresponding pattern to
# > format the start and end datetime, as above.
return "".join(
parse_pattern(pattern).apply(instant, locale)
for pattern, instant
in zip(skel_formats[field], (start, end))
)
# > Otherwise, format the start and end datetime using the fallback pattern.
return _format_fallback_interval(start, end, skeleton, tzinfo, locale)
def get_period_id(time, tzinfo=None, type=None, locale=LC_TIME):
"""
Get the day period ID for a given time.
This ID can be used as a key for the period name dictionary.
>>> get_period_names(locale="de")[get_period_id(time(7, 42), locale="de")]
u'Morgen'
:param time: The time to inspect.
:param tzinfo: The timezone for the time. See ``format_time``.
:param type: The period type to use. Either "selection" or None.
The selection type is used for selecting among phrases such as
“Your email arrived yesterday evening” or “Your email arrived last night”.
:param locale: the `Locale` object, or a locale string
:return: period ID. Something is always returned -- even if it's just "am" or "pm".
"""
time = _get_time(time, tzinfo)
seconds_past_midnight = int(time.hour * 60 * 60 + time.minute * 60 + time.second)
locale = Locale.parse(locale)
# The LDML rules state that the rules may not overlap, so iterating in arbitrary
# order should be alright.
for rule_id, rules in locale.day_period_rules.get(type, {}).items():
for rule in rules:
if "at" in rule and rule["at"] == seconds_past_midnight:
return rule_id
start_ok = end_ok = False
if "from" in rule and seconds_past_midnight >= rule["from"]:
start_ok = True
if "to" in rule and seconds_past_midnight <= rule["to"]:
# This rule type does not exist in the present CLDR data;
# excuse the lack of test coverage.
end_ok = True
if "before" in rule and seconds_past_midnight < rule["before"]:
end_ok = True
if "after" in rule and seconds_past_midnight > rule["after"]:
start_ok = True
if start_ok and end_ok:
return rule_id
if seconds_past_midnight < 43200:
return "am"
else:
return "pm"
def parse_date(string, locale=LC_TIME):
"""Parse a date from a string.
This function uses the date format for the locale as a hint to determine
the order in which the date fields appear in the string.
>>> parse_date('4/1/04', locale='en_US')
datetime.date(2004, 4, 1)
>>> parse_date('01.04.2004', locale='de_DE')
datetime.date(2004, 4, 1)
:param string: the string containing the date
:param locale: a `Locale` object or a locale identifier
"""
# TODO: try ISO format first?
format = get_date_format(locale=locale).pattern.lower()
year_idx = format.index('y')
month_idx = format.find('m')  # find() returns -1 if absent; index() would raise
if month_idx < 0:
    month_idx = format.index('l')
day_idx = format.index('d')
indexes = [(year_idx, 'Y'), (month_idx, 'M'), (day_idx, 'D')]
indexes.sort()
indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)])
# FIXME: this currently only supports numbers, but should also support month
# names, both in the requested locale, and english
numbers = re.findall(r'(\d+)', string)
year = numbers[indexes['Y']]
if len(year) == 2:
year = 2000 + int(year)
else:
year = int(year)
month = int(numbers[indexes['M']])
day = int(numbers[indexes['D']])
if month > 12:
month, day = day, month
return date(year, month, day)
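Note the naive two-digit-year pivot in the code above (`2000 + int(year)`): every two-digit year lands in the 2000s. A sketch of the consequence:

```python
from babel.dates import parse_date

print(parse_date('4/1/04', locale='en_US'))   # datetime.date(2004, 4, 1)
print(parse_date('4/1/99', locale='en_US'))   # datetime.date(2099, 4, 1), not 1999
```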
def parse_time(string, locale=LC_TIME):
"""Parse a time from a string.
This function uses the time format for the locale as a hint to determine
the order in which the time fields appear in the string.
>>> parse_time('15:30:00', locale='en_US')
datetime.time(15, 30)
:param string: the string containing the time
:param locale: a `Locale` object or a locale identifier
:return: the parsed time
:rtype: `time`
"""
# TODO: try ISO format first?
format = get_time_format(locale=locale).pattern.lower()
hour_idx = format.find('h')  # find() returns -1 if absent; index() would raise
if hour_idx < 0:
    hour_idx = format.index('k')
min_idx = format.index('m')
sec_idx = format.index('s')
indexes = [(hour_idx, 'H'), (min_idx, 'M'), (sec_idx, 'S')]
indexes.sort()
indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)])
# FIXME: support 12 hour clock, and 0-based hour specification
# and seconds should be optional, maybe minutes too
# oh, and time-zones, of course
numbers = re.findall(r'(\d+)', string)
hour = int(numbers[indexes['H']])
minute = int(numbers[indexes['M']])
second = int(numbers[indexes['S']])
return time(hour, minute, second)
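As the FIXME above notes, seconds are currently mandatory: the locale's time pattern contains an `s` field, so a string with only hours and minutes leaves `numbers` one entry short. A sketch:

```python
from babel.dates import parse_time

print(parse_time('15:30:00', locale='en_US'))   # datetime.time(15, 30)
try:
    parse_time('15:30', locale='en_US')         # only two numbers found
except IndexError:
    print('seconds are required by the current implementation')
```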
class DateTimePattern(object):
def __init__(self, pattern, format):
self.pattern = pattern
self.format = format
def __repr__(self):
return '<%s %r>' % (type(self).__name__, self.pattern)
def __unicode__(self):
return self.pattern
def __mod__(self, other):
if type(other) is not DateTimeFormat:
return NotImplemented
return self.format % other
def apply(self, datetime, locale):
return self % DateTimeFormat(datetime, locale)
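A sketch of driving these classes directly; `parse_pattern` (defined further down) is the usual entry point and returns a cached `DateTimePattern`:

```python
from datetime import date
from babel.dates import parse_pattern

pattern = parse_pattern('yyyy-MM-dd')             # DateTimePattern instance
print(pattern.apply(date(2007, 4, 1), 'en_US'))   # 2007-04-01
```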
class DateTimeFormat(object):
def __init__(self, value, locale):
assert isinstance(value, (date, datetime, time))
if isinstance(value, (datetime, time)) and value.tzinfo is None:
value = value.replace(tzinfo=UTC)
self.value = value
self.locale = Locale.parse(locale)
def __getitem__(self, name):
char = name[0]
num = len(name)
if char == 'G':
return self.format_era(char, num)
elif char in ('y', 'Y', 'u'):
return self.format_year(char, num)
elif char in ('Q', 'q'):
return self.format_quarter(char, num)
elif char in ('M', 'L'):
return self.format_month(char, num)
elif char in ('w', 'W'):
return self.format_week(char, num)
elif char == 'd':
return self.format(self.value.day, num)
elif char == 'D':
return self.format_day_of_year(num)
elif char == 'F':
return self.format_day_of_week_in_month()
elif char in ('E', 'e', 'c'):
return self.format_weekday(char, num)
elif char == 'a':
# TODO: Add support for the rest of the period formats (a*, b*, B*)
return self.format_period(char)
elif char == 'h':
if self.value.hour % 12 == 0:
return self.format(12, num)
else:
return self.format(self.value.hour % 12, num)
elif char == 'H':
return self.format(self.value.hour, num)
elif char == 'K':
return self.format(self.value.hour % 12, num)
elif char == 'k':
if self.value.hour == 0:
return self.format(24, num)
else:
return self.format(self.value.hour, num)
elif char == 'm':
return self.format(self.value.minute, num)
elif char == 's':
return self.format(self.value.second, num)
elif char == 'S':
return self.format_frac_seconds(num)
elif char == 'A':
return self.format_milliseconds_in_day(num)
elif char in ('z', 'Z', 'v', 'V', 'x', 'X', 'O'):
return self.format_timezone(char, num)
else:
raise KeyError('Unsupported date/time field %r' % char)
def extract(self, char):
char = str(char)[0]
if char == 'y':
return self.value.year
elif char == 'M':
return self.value.month
elif char == 'd':
return self.value.day
elif char == 'H':
return self.value.hour
elif char == 'h':
return (self.value.hour % 12 or 12)
elif char == 'm':
return self.value.minute
elif char == 'a':
return int(self.value.hour >= 12) # 0 for am, 1 for pm
else:
raise NotImplementedError("Not implemented: extracting %r from %r" % (char, self.value))
def format_era(self, char, num):
width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[max(3, num)]
era = int(self.value.year >= 0)
return get_era_names(width, self.locale)[era]
def format_year(self, char, num):
value = self.value.year
if char.isupper():
week = self.get_week_number(self.get_day_of_year())
if week == 0:
value -= 1
year = self.format(value, num)
if num == 2:
year = year[-2:]
return year
def format_quarter(self, char, num):
quarter = (self.value.month - 1) // 3 + 1
if num <= 2:
return ('%%0%dd' % num) % quarter
width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num]
context = {'Q': 'format', 'q': 'stand-alone'}[char]
return get_quarter_names(width, context, self.locale)[quarter]
def format_month(self, char, num):
if num <= 2:
return ('%%0%dd' % num) % self.value.month
width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num]
context = {'M': 'format', 'L': 'stand-alone'}[char]
return get_month_names(width, context, self.locale)[self.value.month]
def format_week(self, char, num):
if char.islower(): # week of year
day_of_year = self.get_day_of_year()
week = self.get_week_number(day_of_year)
if week == 0:
date = self.value - timedelta(days=day_of_year)
week = self.get_week_number(self.get_day_of_year(date),
date.weekday())
return self.format(week, num)
else: # week of month
week = self.get_week_number(self.value.day)
if week == 0:
date = self.value - timedelta(days=self.value.day)
week = self.get_week_number(date.day, date.weekday())
return '%d' % week
def format_weekday(self, char='E', num=4):
"""
Return weekday from parsed datetime according to format pattern.
>>> format = DateTimeFormat(date(2016, 2, 28), Locale.parse('en_US'))
>>> format.format_weekday()
u'Sunday'
'E': Day of week - Use one through three letters for the abbreviated day name, four for the full (wide) name,
five for the narrow name, or six for the short name.
>>> format.format_weekday('E',2)
u'Sun'
'e': Local day of week. Same as E except adds a numeric value that will depend on the local starting day of the
week, using one or two letters. For this example, Monday is the first day of the week.
>>> format.format_weekday('e',2)
'01'
'c': Stand-Alone local day of week - Use one letter for the local numeric value (same as 'e'), three for the
abbreviated day name, four for the full (wide) name, five for the narrow name, or six for the short name.
>>> format.format_weekday('c',1)
'1'
:param char: pattern format character ('e','E','c')
:param num: count of format character
"""
if num < 3:
if char.islower():
value = 7 - self.locale.first_week_day + self.value.weekday()
return self.format(value % 7 + 1, num)
num = 3
weekday = self.value.weekday()
width = {3: 'abbreviated', 4: 'wide', 5: 'narrow', 6: 'short'}[num]
if char == 'c':
context = 'stand-alone'
else:
context = 'format'
return get_day_names(width, context, self.locale)[weekday]
def format_day_of_year(self, num):
return self.format(self.get_day_of_year(), num)
def format_day_of_week_in_month(self):
return '%d' % ((self.value.day - 1) // 7 + 1)
def format_period(self, char):
period = {0: 'am', 1: 'pm'}[int(self.value.hour >= 12)]
for width in ('wide', 'narrow', 'abbreviated'):
period_names = get_period_names(context='format', width=width, locale=self.locale)
if period in period_names:
return period_names[period]
raise ValueError('Could not format period %s in %s' % (period, self.locale))
def format_frac_seconds(self, num):
""" Return fractional seconds.
Rounds the time's microseconds to the precision given by the number \
of digits passed in.
"""
value = self.value.microsecond / 1000000
return self.format(round(value, num) * 10**num, num)
def format_milliseconds_in_day(self, num):
msecs = self.value.microsecond // 1000 + self.value.second * 1000 + \
self.value.minute * 60000 + self.value.hour * 3600000
return self.format(msecs, num)
def format_timezone(self, char, num):
width = {3: 'short', 4: 'long', 5: 'iso8601'}[max(3, num)]
if char == 'z':
return get_timezone_name(self.value, width, locale=self.locale)
elif char == 'Z':
if num == 5:
return get_timezone_gmt(self.value, width, locale=self.locale, return_z=True)
return get_timezone_gmt(self.value, width, locale=self.locale)
elif char == 'O':
if num == 4:
return get_timezone_gmt(self.value, width, locale=self.locale)
# TODO: add support for num == 1 (short localized GMT format, e.g. "GMT-8")
elif char == 'v':
return get_timezone_name(self.value.tzinfo, width,
locale=self.locale)
elif char == 'V':
if num == 1:
return get_timezone_name(self.value.tzinfo, width,
uncommon=True, locale=self.locale)
elif num == 2:
return get_timezone_name(self.value.tzinfo, locale=self.locale, return_zone=True)
elif num == 3:
return get_timezone_location(self.value.tzinfo, locale=self.locale, return_city=True)
return get_timezone_location(self.value.tzinfo, locale=self.locale)
# Included additional elif condition to add support for 'Xx' in timezone format
elif char == 'X':
if num == 1:
return get_timezone_gmt(self.value, width='iso8601_short', locale=self.locale,
return_z=True)
elif num in (2, 4):
return get_timezone_gmt(self.value, width='short', locale=self.locale,
return_z=True)
elif num in (3, 5):
return get_timezone_gmt(self.value, width='iso8601', locale=self.locale,
return_z=True)
elif char == 'x':
if num == 1:
return get_timezone_gmt(self.value, width='iso8601_short', locale=self.locale)
elif num in (2, 4):
return get_timezone_gmt(self.value, width='short', locale=self.locale)
elif num in (3, 5):
return get_timezone_gmt(self.value, width='iso8601', locale=self.locale)
def format(self, value, length):
return ('%%0%dd' % length) % value
def get_day_of_year(self, date=None):
if date is None:
date = self.value
return (date - date.replace(month=1, day=1)).days + 1
def get_week_number(self, day_of_period, day_of_week=None):
"""Return the number of the week of a day within a period. This may be
the week number in a year or the week number in a month.
Usually this will return a value equal to or greater than 1, but if the
first week of the period is so short that it actually counts as the last
week of the previous period, this function will return 0.
>>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('de_DE'))
>>> format.get_week_number(6)
1
>>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('en_US'))
>>> format.get_week_number(6)
2
:param day_of_period: the number of the day in the period (usually
either the day of month or the day of year)
:param day_of_week: the week day; if omitted, the week day of the
current date is assumed
"""
if day_of_week is None:
day_of_week = self.value.weekday()
first_day = (day_of_week - self.locale.first_week_day -
day_of_period + 1) % 7
if first_day < 0:
first_day += 7
week_number = (day_of_period + first_day - 1) // 7
if 7 - first_day >= self.locale.min_week_days:
week_number += 1
return week_number
PATTERN_CHARS = {
'G': [1, 2, 3, 4, 5], # era
'y': None, 'Y': None, 'u': None, # year
'Q': [1, 2, 3, 4, 5], 'q': [1, 2, 3, 4, 5], # quarter
'M': [1, 2, 3, 4, 5], 'L': [1, 2, 3, 4, 5], # month
'w': [1, 2], 'W': [1], # week
'd': [1, 2], 'D': [1, 2, 3], 'F': [1], 'g': None, # day
'E': [1, 2, 3, 4, 5, 6], 'e': [1, 2, 3, 4, 5, 6], 'c': [1, 3, 4, 5, 6], # week day
'a': [1], # period
'h': [1, 2], 'H': [1, 2], 'K': [1, 2], 'k': [1, 2], # hour
'm': [1, 2], # minute
's': [1, 2], 'S': None, 'A': None, # second
'z': [1, 2, 3, 4], 'Z': [1, 2, 3, 4, 5], 'O': [1, 4], 'v': [1, 4], # zone
'V': [1, 2, 3, 4], 'x': [1, 2, 3, 4, 5], 'X': [1, 2, 3, 4, 5] # zone
}
#: The pattern characters declared in the Date Field Symbol Table
#: (http://www.unicode.org/reports/tr35/tr35-dates.html#Date_Field_Symbol_Table)
#: in order of decreasing magnitude.
PATTERN_CHAR_ORDER = "GyYuUQqMLlwWdDFgEecabBChHKkjJmsSAzZOvVXx"
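The width lists in `PATTERN_CHARS` are enforced by `parse_pattern` below; a field repeated more often than its list allows is rejected. A sketch:

```python
from babel.dates import parse_pattern

try:
    parse_pattern('www')        # 'w' (week of year) only allows widths 1-2
except ValueError as exc:
    print(exc)                  # Invalid length for field: 'www'
```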
_pattern_cache = {}
def parse_pattern(pattern):
"""Parse date, time, and datetime format patterns.
>>> parse_pattern("MMMMd").format
u'%(MMMM)s%(d)s'
>>> parse_pattern("MMM d, yyyy").format
u'%(MMM)s %(d)s, %(yyyy)s'
Pattern can contain literal strings in single quotes:
>>> parse_pattern("H:mm' Uhr 'z").format
u'%(H)s:%(mm)s Uhr %(z)s'
An actual single quote can be used by using two adjacent single quote
characters:
>>> parse_pattern("hh' o''clock'").format
u"%(hh)s o'clock"
:param pattern: the formatting pattern to parse
"""
if type(pattern) is DateTimePattern:
return pattern
if pattern in _pattern_cache:
return _pattern_cache[pattern]
result = []
for tok_type, tok_value in tokenize_pattern(pattern):
if tok_type == "chars":
result.append(tok_value.replace('%', '%%'))
elif tok_type == "field":
fieldchar, fieldnum = tok_value
limit = PATTERN_CHARS[fieldchar]
if limit and fieldnum not in limit:
raise ValueError('Invalid length for field: %r'
% (fieldchar * fieldnum))
result.append('%%(%s)s' % (fieldchar * fieldnum))
else:
raise NotImplementedError("Unknown token type: %s" % tok_type)
_pattern_cache[pattern] = pat = DateTimePattern(pattern, u''.join(result))
return pat
def tokenize_pattern(pattern):
"""
Tokenize date format patterns.
Returns a list of (token_type, token_value) tuples.
``token_type`` may be either "chars" or "field".
For "chars" tokens, the value is the literal value.
For "field" tokens, the value is a tuple of (field character, repetition count).
:param pattern: Pattern string
:type pattern: str
:rtype: list[tuple]
"""
result = []
quotebuf = None
charbuf = []
fieldchar = ['']
fieldnum = [0]
def append_chars():
result.append(('chars', ''.join(charbuf).replace('\0', "'")))
del charbuf[:]
def append_field():
result.append(('field', (fieldchar[0], fieldnum[0])))
fieldchar[0] = ''
fieldnum[0] = 0
for idx, char in enumerate(pattern.replace("''", '\0')):
if quotebuf is None:
if char == "'": # quote started
if fieldchar[0]:
append_field()
elif charbuf:
append_chars()
quotebuf = []
elif char in PATTERN_CHARS:
if charbuf:
append_chars()
if char == fieldchar[0]:
fieldnum[0] += 1
else:
if fieldchar[0]:
append_field()
fieldchar[0] = char
fieldnum[0] = 1
else:
if fieldchar[0]:
append_field()
charbuf.append(char)
elif quotebuf is not None:
if char == "'": # end of quote
charbuf.extend(quotebuf)
quotebuf = None
else: # inside quote
quotebuf.append(char)
if fieldchar[0]:
append_field()
elif charbuf:
append_chars()
return result
def untokenize_pattern(tokens):
"""
Turn a date format pattern token stream back into a string.
This is the reverse operation of ``tokenize_pattern``.
:type tokens: Iterable[tuple]
:rtype: str
"""
output = []
for tok_type, tok_value in tokens:
if tok_type == "field":
output.append(tok_value[0] * tok_value[1])
elif tok_type == "chars":
if not any(ch in PATTERN_CHARS for ch in tok_value): # No need to quote
output.append(tok_value)
else:
output.append("'%s'" % tok_value.replace("'", "''"))
return "".join(output)
def split_interval_pattern(pattern):
"""
Split an interval-describing datetime pattern into multiple pieces.
> The pattern is then designed to be broken up into two pieces by determining the first repeating field.
- http://www.unicode.org/reports/tr35/tr35-dates.html#intervalFormats
>>> split_interval_pattern(u'E d.M. \u2013 E d.M.')
[u'E d.M. \u2013 ', 'E d.M.']
>>> split_interval_pattern("Y 'text' Y 'more text'")
["Y 'text '", "Y 'more text'"]
>>> split_interval_pattern(u"E, MMM d \u2013 E")
[u'E, MMM d \u2013 ', u'E']
>>> split_interval_pattern("MMM d")
['MMM d']
>>> split_interval_pattern("y G")
['y G']
>>> split_interval_pattern(u"MMM d \u2013 d")
[u'MMM d \u2013 ', u'd']
:param pattern: Interval pattern string
:return: list of "subpatterns"
"""
seen_fields = set()
parts = [[]]
for tok_type, tok_value in tokenize_pattern(pattern):
if tok_type == "field":
if tok_value[0] in seen_fields: # Repeated field
parts.append([])
seen_fields.clear()
seen_fields.add(tok_value[0])
parts[-1].append((tok_type, tok_value))
return [untokenize_pattern(tokens) for tokens in parts]
def match_skeleton(skeleton, options, allow_different_fields=False):
"""
Find the closest match for the given datetime skeleton among the options given.
This uses the rules outlined in the TR35 document.
>>> match_skeleton('yMMd', ('yMd', 'yMMMd'))
'yMd'
>>> match_skeleton('yMMd', ('jyMMd',), allow_different_fields=True)
'jyMMd'
>>> match_skeleton('yMMd', ('qyMMd',), allow_different_fields=False)
>>> match_skeleton('hmz', ('hmv',))
'hmv'
:param skeleton: The skeleton to match
:type skeleton: str
:param options: An iterable of other skeletons to match against
:type options: Iterable[str]
:return: The closest skeleton match, or if no match was found, None.
:rtype: str|None
"""
# TODO: maybe implement pattern expansion?
# Based on the implementation in
# http://source.icu-project.org/repos/icu/icu4j/trunk/main/classes/core/src/com/ibm/icu/text/DateIntervalInfo.java
# Filter out falsy values and sort for stability; when `interval_formats` is passed in, there may be a None key.
options = sorted(option for option in options if option)
if 'z' in skeleton and not any('z' in option for option in options):
skeleton = skeleton.replace('z', 'v')
get_input_field_width = dict(t[1] for t in tokenize_pattern(skeleton) if t[0] == "field").get
best_skeleton = None
best_distance = None
for option in options:
get_opt_field_width = dict(t[1] for t in tokenize_pattern(option) if t[0] == "field").get
distance = 0
for field in PATTERN_CHARS:
input_width = get_input_field_width(field, 0)
opt_width = get_opt_field_width(field, 0)
if input_width == opt_width:
continue
if opt_width == 0 or input_width == 0:
if not allow_different_fields: # This one is not okay
option = None
break
distance += 0x1000 # Magic weight constant for "entirely different fields"
elif field == 'M' and ((input_width > 2 and opt_width <= 2) or (input_width <= 2 and opt_width > 2)):
distance += 0x100 # Magic weight for "text turns into a number"
else:
distance += abs(input_width - opt_width)
if not option: # We lost the option along the way (probably due to "allow_different_fields")
continue
if not best_skeleton or distance < best_distance:
best_skeleton = option
best_distance = distance
if distance == 0: # Found a perfect match!
break
return best_skeleton
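# Illustrative sketch (not part of the original module): the weighted distance
# above prefers plain width mismatches (cheap) over a numeric/text month switch
# (0x100) and over entirely different fields (0x1000), so the closest
# candidate wins:
#
#   match_skeleton('yMMMd', ('yMd', 'yMMMd'))   # -> 'yMMMd' (exact match)
#   match_skeleton('yMMd', ('yMd', 'yMMMd'))    # -> 'yMd' (month stays numeric)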
| [
"[email protected]"
] | |
ca2951f89c8fcf239e756f26b15ef01148feb032 | 3b50605ffe45c412ee33de1ad0cadce2c5a25ca2 | /python/paddle/fluid/tests/custom_op/test_multi_out_jit.py | 7e252e048b64c9b158fabe21b818fbccaf71a26c | [
"Apache-2.0"
] | permissive | Superjomn/Paddle | f5f4072cf75ac9ecb0ff528876ee264b14bbf8d1 | 7a0b0dab8e58b6a3b28b3b82c43d55c9bd3d4188 | refs/heads/develop | 2023-02-04T20:27:54.244843 | 2023-01-26T15:31:14 | 2023-01-26T15:31:14 | 66,896,049 | 4 | 1 | Apache-2.0 | 2023-04-14T02:29:52 | 2016-08-30T01:45:54 | C++ | UTF-8 | Python | false | false | 3,680 | py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
import numpy as np
from utils import extra_cc_args, paddle_includes
import paddle
from paddle.utils.cpp_extension import get_build_directory, load
from paddle.utils.cpp_extension.extension_utils import run_cmd
# Because Windows doesn't use docker, the shared lib already exists in the
# cache dir; it will not be compiled again unless the shared lib is removed.
file = '{}\\multi_out_jit\\multi_out_jit.pyd'.format(get_build_directory())
if os.name == 'nt' and os.path.isfile(file):
cmd = 'del {}'.format(file)
run_cmd(cmd, True)
# Compile and load custom op Just-In-Time.
multi_out_module = load(
name='multi_out_jit',
sources=['multi_out_test_op.cc'],
extra_include_paths=paddle_includes, # add for Coverage CI
extra_cxx_cflags=extra_cc_args, # test for cflags
verbose=True,
)
class TestMultiOutputDtypes(unittest.TestCase):
def setUp(self):
self.custom_op = multi_out_module.multi_out
self.dtypes = ['float32', 'float64']
self.devices = ['cpu']
def run_static(self, device, dtype):
paddle.set_device(device)
x_data = np.random.uniform(-1, 1, [4, 8]).astype(dtype)
with paddle.static.scope_guard(paddle.static.Scope()):
with paddle.static.program_guard(paddle.static.Program()):
x = paddle.static.data(name='X', shape=[None, 8], dtype=dtype)
outs = self.custom_op(x)
exe = paddle.static.Executor()
exe.run(paddle.static.default_startup_program())
res = exe.run(
paddle.static.default_main_program(),
feed={'X': x_data},
fetch_list=outs,
)
return res
def check_multi_outputs(self, outs, is_dynamic=False):
out, zero_float64, one_int32 = outs
if is_dynamic:
zero_float64 = zero_float64.numpy()
one_int32 = one_int32.numpy()
# Fake_float64
self.assertTrue('float64' in str(zero_float64.dtype))
np.testing.assert_array_equal(
zero_float64, np.zeros([4, 8]).astype('float64')
)
# ZFake_int32
self.assertTrue('int32' in str(one_int32.dtype))
np.testing.assert_array_equal(
one_int32, np.ones([4, 8]).astype('int32')
)
def test_static(self):
paddle.enable_static()
for device in self.devices:
for dtype in self.dtypes:
res = self.run_static(device, dtype)
self.check_multi_outputs(res)
paddle.disable_static()
def test_dynamic(self):
for device in self.devices:
for dtype in self.dtypes:
paddle.set_device(device)
x_data = np.random.uniform(-1, 1, [4, 8]).astype(dtype)
x = paddle.to_tensor(x_data)
outs = self.custom_op(x)
self.assertTrue(len(outs) == 3)
self.check_multi_outputs(outs, True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
665a296262fe97164ada5fc3e0db919390d90e00 | e45d2faad9389886a82ff5176853b1ff6e37caae | /simplecv/017_face_detect.py | e93e398dd543658092ca32de34f80eb4096d57e8 | [] | no_license | allenmo/python_study | 6320aa4cd80fe46ccf73076015c67bdcb6338d30 | 7aff5d810ca6e791d62235d57c072a8dc14457ca | refs/heads/master | 2021-03-24T12:00:33.079530 | 2016-11-22T23:35:58 | 2016-11-22T23:35:58 | 55,770,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 678 | py | from SimpleCV import *
cam = Camera()
disp = Display()
size = cam.getImage().size()
segment = HaarCascade("face.xml")
while disp.isNotDone():
img = cam.getImage()
autoface = img.findHaarFeatures(segment)
lenFace = len(autoface)
    if lenFace > 0:
for i in range(0,lenFace):
face = autoface[i]
x = face.x
y = face.y
width = face.width()
height = face.height()
img.dl().centeredRectangle((x,y),(width,height),Color.LIME)
img.applyLayers()
img.drawText("Num of Face: " + str(lenFace), x = size[0]-150, y = size[1]-30, color = Color.LIME, fontsize = 24)
img.show()
| [
"[email protected]"
] | |
f90334a1939d9b22c35a1f046ae87e4ce66693cb | ac305c6739541e84857e297f8eb1b19417978548 | /module_128.py | b9ba541614d3ccd041e0fe0728a597cc18a34050 | [] | no_license | imhardikj/git_test | d6608d6c02e0bc454f9dd31ffbbc5704a7046a61 | 43f0de2e9ac09ecd4fdfee27879fd8ae354a0685 | refs/heads/master | 2020-03-27T21:56:46.394739 | 2018-09-03T11:27:58 | 2018-09-03T11:27:58 | 147,189,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,214 | py | """A set of classes used to represent electric cars."""
from module_121 import Car
class Battery():
"""A simple attempt to model a battery for an electric car."""
def __init__(self, battery_size=70):
"""Initialize the batteery's attributes."""
self.battery_size = battery_size
def describe_battery(self):
"""Print a statement describing the battery size."""
print("This car has a " + str(self.battery_size) + "-kWh battery.")
def get_range(self):
"""Print a statement about the range this battery provides."""
if self.battery_size == 70:
range = 240
elif self.battery_size == 85:
range = 270
message = "This car can go approximately " + str(range)
message += " miles on a full charge."
print(message)
class ElectricCar(Car):
"""Models aspects of a car, specific to electric vehicles."""
def __init__(self, make, model, year):
"""
Initialize attributes of the parent class.
Then initialize attributes specific to an electric car.
"""
super().__init__(make, model, year)
self.battery = Battery()
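# Usage sketch (illustrative; assumes module_121 provides the Car parent class
# imported above):
#   my_tesla = ElectricCar('tesla', 'model s', 2016)
#   my_tesla.battery.describe_battery()  # This car has a 70-kWh battery.
#   my_tesla.battery.get_range()         # ... approximately 240 miles ...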
| [
"[email protected]"
] | |
3c840954bad45d6884f9cadc51628038511b55ba | d6475dda9db9ea6e447db2b4d75d2ebdf454e9d8 | /polls/models.py | fefdac850f120944eee69c1278d883e9925f2e2d | [] | no_license | yoophi/django_polls | 3d92b01f239ed6933b7593408b788f7adf2e6c31 | f94c0ff6307cbdd2d3c65a6b5131a515b6fe67af | refs/heads/master | 2021-01-10T00:57:18.706884 | 2016-03-24T14:50:38 | 2016-03-24T14:50:38 | 54,241,666 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 971 | py | from __future__ import unicode_literals
import datetime
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Question(models.Model):
question_text = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __str__(self):
return self.question_text
def was_published_recently(self):
now = timezone.now()
return now - datetime.timedelta(days=1) <= self.pub_date <= now
was_published_recently.admin_order_field = 'pub_date'
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
@python_2_unicode_compatible
class Choice(models.Model):
question = models.ForeignKey(Question)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
def __str__(self):
return self.choice_text
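# Illustrative check (assumes a configured Django environment):
#   q = Question(question_text="What's new?", pub_date=timezone.now())
#   q.was_published_recently()  # True: pub_date falls within the last day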
| [
"[email protected]"
] | |
c9585d56b0fe94af3866093cae1b512d95ca70cb | fe3265b72e691c6df8ecd936c25b6d48ac33b59a | /tests/components/fritz/test_button.py | 36af1c27f5e0bcf2f1852749964ed9cdf872c95c | [
"Apache-2.0"
] | permissive | bdraco/home-assistant | dcaf76c0967783a08eec30ce704e5e9603a2f0ca | bfa315be51371a1b63e04342a0b275a57ae148bd | refs/heads/dev | 2023-08-16T10:39:15.479821 | 2023-02-21T22:38:50 | 2023-02-21T22:38:50 | 218,684,806 | 13 | 7 | Apache-2.0 | 2023-02-21T23:40:57 | 2019-10-31T04:33:09 | Python | UTF-8 | Python | false | false | 2,402 | py | """Tests for Fritz!Tools button platform."""
from unittest.mock import patch
import pytest
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
from homeassistant.components.fritz.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from .const import MOCK_USER_DATA
from tests.common import MockConfigEntry
async def test_button_setup(hass: HomeAssistant, fc_class_mock, fh_class_mock) -> None:
"""Test setup of Fritz!Tools buttons."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA)
entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
assert entry.state == ConfigEntryState.LOADED
buttons = hass.states.async_all(BUTTON_DOMAIN)
assert len(buttons) == 4
for button in buttons:
assert button.state == STATE_UNKNOWN
@pytest.mark.parametrize(
("entity_id", "wrapper_method"),
[
("button.mock_title_firmware_update", "async_trigger_firmware_update"),
("button.mock_title_reboot", "async_trigger_reboot"),
("button.mock_title_reconnect", "async_trigger_reconnect"),
("button.mock_title_cleanup", "async_trigger_cleanup"),
],
)
async def test_buttons(
hass: HomeAssistant,
entity_id: str,
wrapper_method: str,
fc_class_mock,
fh_class_mock,
) -> None:
"""Test Fritz!Tools buttons."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA)
entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, {})
await hass.async_block_till_done()
assert entry.state == ConfigEntryState.LOADED
button = hass.states.get(entity_id)
assert button
assert button.state == STATE_UNKNOWN
with patch(
f"homeassistant.components.fritz.common.AvmWrapper.{wrapper_method}"
) as mock_press_action:
await hass.services.async_call(
BUTTON_DOMAIN,
SERVICE_PRESS,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
await hass.async_block_till_done()
mock_press_action.assert_called_once()
button = hass.states.get(entity_id)
assert button.state != STATE_UNKNOWN
| [
"[email protected]"
] | |
827c7b9b76801ff6a9ebbc2f8342fe133931ca45 | f17de2f1a2804033a7b7fc74a0d09f964fe1d876 | /hungerExpress/food/migrations/0003_auto_20180331_1736.py | a285d1dd32068594eea223b405926bad96304f74 | [] | no_license | udwivedi394/djangoProjects | 60d6eb275ce75dab3884f1a9c68e01226625c4e2 | 22075b7f850d796afe5a0c06411eb5ff762357b7 | refs/heads/master | 2021-09-10T21:54:44.363710 | 2018-04-03T01:58:27 | 2018-04-03T01:58:27 | 126,106,563 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | # Generated by Django 2.0.3 on 2018-03-31 12:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('food', '0002_auto_20180331_1725'),
]
operations = [
migrations.AlterField(
model_name='restaurant',
name='contact_no',
field=models.CharField(max_length=20),
),
]
| [
"[email protected]"
] | |
f6cc20c827f453c325a7d6abd1a137191b4f3eb1 | 76356eb3f3963051a15f7dfe6867586293bd7534 | /models/pruned/random_pruning/imagenet/resnet50_5.py | 49f3cdf8fcafef99f746b8dc1da08175cd959536 | [] | no_license | ICIdsl/performance_modelling | f59c74c0c6b2e60457694978f9a6d2251f3a70c2 | c48cf66db8e530797d0106a737c5c7da0852423c | refs/heads/master | 2023-07-07T22:32:30.718833 | 2021-08-13T12:48:37 | 2021-08-13T12:48:37 | 394,321,871 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,795 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
class ResNet50(nn.Module):
def __init__(self, num_classes=10):
super().__init__()
self.conv1 = nn.Conv2d(3, 62, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
self.bn1 = nn.BatchNorm2d(62, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
self.layer1_0_conv1 = nn.Conv2d(62, 63, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1_0_bn1 = nn.BatchNorm2d(63, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_0_conv2 = nn.Conv2d(63, 63, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer1_0_bn2 = nn.BatchNorm2d(63, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_0_conv3 = nn.Conv2d(63, 251, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1_0_bn3 = nn.BatchNorm2d(251, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_0_downsample_0 = nn.Conv2d(62, 251, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1_0_downsample_1 = nn.BatchNorm2d(251, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_1_conv1 = nn.Conv2d(251, 63, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1_1_bn1 = nn.BatchNorm2d(63, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_1_conv2 = nn.Conv2d(63, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer1_1_bn2 = nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_1_conv3 = nn.Conv2d(64, 251, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1_1_bn3 = nn.BatchNorm2d(251, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_2_conv1 = nn.Conv2d(251, 63, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1_2_bn1 = nn.BatchNorm2d(63, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_2_conv2 = nn.Conv2d(63, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer1_2_bn2 = nn.BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer1_2_conv3 = nn.Conv2d(64, 251, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer1_2_bn3 = nn.BatchNorm2d(251, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_0_conv1 = nn.Conv2d(251, 127, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_0_bn1 = nn.BatchNorm2d(127, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_0_conv2 = nn.Conv2d(127, 125, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
self.layer2_0_bn2 = nn.BatchNorm2d(125, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_0_conv3 = nn.Conv2d(125, 482, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_0_bn3 = nn.BatchNorm2d(482, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_0_downsample_0 = nn.Conv2d(251, 482, kernel_size=(1, 1), stride=(2, 2), bias=False)
self.layer2_0_downsample_1 = nn.BatchNorm2d(482, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_1_conv1 = nn.Conv2d(482, 127, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_1_bn1 = nn.BatchNorm2d(127, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_1_conv2 = nn.Conv2d(127, 127, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer2_1_bn2 = nn.BatchNorm2d(127, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_1_conv3 = nn.Conv2d(127, 482, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_1_bn3 = nn.BatchNorm2d(482, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_2_conv1 = nn.Conv2d(482, 127, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_2_bn1 = nn.BatchNorm2d(127, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_2_conv2 = nn.Conv2d(127, 126, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer2_2_bn2 = nn.BatchNorm2d(126, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_2_conv3 = nn.Conv2d(126, 482, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_2_bn3 = nn.BatchNorm2d(482, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_3_conv1 = nn.Conv2d(482, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_3_bn1 = nn.BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_3_conv2 = nn.Conv2d(128, 127, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer2_3_bn2 = nn.BatchNorm2d(127, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer2_3_conv3 = nn.Conv2d(127, 482, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer2_3_bn3 = nn.BatchNorm2d(482, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_0_conv1 = nn.Conv2d(482, 255, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_0_bn1 = nn.BatchNorm2d(255, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_0_conv2 = nn.Conv2d(255, 252, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
self.layer3_0_bn2 = nn.BatchNorm2d(252, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_0_conv3 = nn.Conv2d(252, 939, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_0_bn3 = nn.BatchNorm2d(939, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_0_downsample_0 = nn.Conv2d(482, 939, kernel_size=(1, 1), stride=(2, 2), bias=False)
self.layer3_0_downsample_1 = nn.BatchNorm2d(939, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_1_conv1 = nn.Conv2d(939, 253, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_1_bn1 = nn.BatchNorm2d(253, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_1_conv2 = nn.Conv2d(253, 253, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer3_1_bn2 = nn.BatchNorm2d(253, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_1_conv3 = nn.Conv2d(253, 939, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_1_bn3 = nn.BatchNorm2d(939, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_2_conv1 = nn.Conv2d(939, 255, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_2_bn1 = nn.BatchNorm2d(255, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_2_conv2 = nn.Conv2d(255, 254, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer3_2_bn2 = nn.BatchNorm2d(254, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_2_conv3 = nn.Conv2d(254, 939, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_2_bn3 = nn.BatchNorm2d(939, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_3_conv1 = nn.Conv2d(939, 253, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_3_bn1 = nn.BatchNorm2d(253, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_3_conv2 = nn.Conv2d(253, 254, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer3_3_bn2 = nn.BatchNorm2d(254, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_3_conv3 = nn.Conv2d(254, 939, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_3_bn3 = nn.BatchNorm2d(939, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_4_conv1 = nn.Conv2d(939, 250, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_4_bn1 = nn.BatchNorm2d(250, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_4_conv2 = nn.Conv2d(250, 251, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer3_4_bn2 = nn.BatchNorm2d(251, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_4_conv3 = nn.Conv2d(251, 939, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_4_bn3 = nn.BatchNorm2d(939, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_5_conv1 = nn.Conv2d(939, 252, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_5_bn1 = nn.BatchNorm2d(252, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_5_conv2 = nn.Conv2d(252, 253, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer3_5_bn2 = nn.BatchNorm2d(253, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer3_5_conv3 = nn.Conv2d(253, 939, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer3_5_bn3 = nn.BatchNorm2d(939, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_0_conv1 = nn.Conv2d(939, 503, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer4_0_bn1 = nn.BatchNorm2d(503, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_0_conv2 = nn.Conv2d(503, 502, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
self.layer4_0_bn2 = nn.BatchNorm2d(502, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_0_conv3 = nn.Conv2d(502, 1965, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer4_0_bn3 = nn.BatchNorm2d(1965, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_0_downsample_0 = nn.Conv2d(939, 1965, kernel_size=(1, 1), stride=(2, 2), bias=False)
self.layer4_0_downsample_1 = nn.BatchNorm2d(1965, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_1_conv1 = nn.Conv2d(1965, 505, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer4_1_bn1 = nn.BatchNorm2d(505, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_1_conv2 = nn.Conv2d(505, 503, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer4_1_bn2 = nn.BatchNorm2d(503, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_1_conv3 = nn.Conv2d(503, 1965, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer4_1_bn3 = nn.BatchNorm2d(1965, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_2_conv1 = nn.Conv2d(1965, 504, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer4_2_bn1 = nn.BatchNorm2d(504, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_2_conv2 = nn.Conv2d(504, 505, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
self.layer4_2_bn2 = nn.BatchNorm2d(505, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.layer4_2_conv3 = nn.Conv2d(505, 1965, kernel_size=(1, 1), stride=(1, 1), bias=False)
self.layer4_2_bn3 = nn.BatchNorm2d(1965, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
self.avgpool = nn.AdaptiveAvgPool2d(output_size=(1, 1))
self.fc = nn.Linear(in_features=1965, out_features=1000, bias=True)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = F.relu(x, inplace=True)
x = self.maxpool(x)
x_main = x
x_main = self.layer1_0_conv1(x_main)
x_main = self.layer1_0_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer1_0_conv2(x_main)
x_main = self.layer1_0_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer1_0_conv3(x_main)
x_main = self.layer1_0_bn3(x_main)
x_residual = x
x_residual = self.layer1_0_downsample_0(x_residual)
x_residual = self.layer1_0_downsample_1(x_residual)
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer1_1_conv1(x_main)
x_main = self.layer1_1_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer1_1_conv2(x_main)
x_main = self.layer1_1_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer1_1_conv3(x_main)
x_main = self.layer1_1_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer1_2_conv1(x_main)
x_main = self.layer1_2_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer1_2_conv2(x_main)
x_main = self.layer1_2_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer1_2_conv3(x_main)
x_main = self.layer1_2_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer2_0_conv1(x_main)
x_main = self.layer2_0_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_0_conv2(x_main)
x_main = self.layer2_0_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_0_conv3(x_main)
x_main = self.layer2_0_bn3(x_main)
x_residual = x
x_residual = self.layer2_0_downsample_0(x_residual)
x_residual = self.layer2_0_downsample_1(x_residual)
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer2_1_conv1(x_main)
x_main = self.layer2_1_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_1_conv2(x_main)
x_main = self.layer2_1_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_1_conv3(x_main)
x_main = self.layer2_1_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer2_2_conv1(x_main)
x_main = self.layer2_2_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_2_conv2(x_main)
x_main = self.layer2_2_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_2_conv3(x_main)
x_main = self.layer2_2_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer2_3_conv1(x_main)
x_main = self.layer2_3_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_3_conv2(x_main)
x_main = self.layer2_3_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer2_3_conv3(x_main)
x_main = self.layer2_3_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer3_0_conv1(x_main)
x_main = self.layer3_0_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_0_conv2(x_main)
x_main = self.layer3_0_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_0_conv3(x_main)
x_main = self.layer3_0_bn3(x_main)
x_residual = x
x_residual = self.layer3_0_downsample_0(x_residual)
x_residual = self.layer3_0_downsample_1(x_residual)
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer3_1_conv1(x_main)
x_main = self.layer3_1_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_1_conv2(x_main)
x_main = self.layer3_1_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_1_conv3(x_main)
x_main = self.layer3_1_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer3_2_conv1(x_main)
x_main = self.layer3_2_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_2_conv2(x_main)
x_main = self.layer3_2_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_2_conv3(x_main)
x_main = self.layer3_2_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer3_3_conv1(x_main)
x_main = self.layer3_3_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_3_conv2(x_main)
x_main = self.layer3_3_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_3_conv3(x_main)
x_main = self.layer3_3_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer3_4_conv1(x_main)
x_main = self.layer3_4_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_4_conv2(x_main)
x_main = self.layer3_4_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_4_conv3(x_main)
x_main = self.layer3_4_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer3_5_conv1(x_main)
x_main = self.layer3_5_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_5_conv2(x_main)
x_main = self.layer3_5_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer3_5_conv3(x_main)
x_main = self.layer3_5_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer4_0_conv1(x_main)
x_main = self.layer4_0_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer4_0_conv2(x_main)
x_main = self.layer4_0_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer4_0_conv3(x_main)
x_main = self.layer4_0_bn3(x_main)
x_residual = x
x_residual = self.layer4_0_downsample_0(x_residual)
x_residual = self.layer4_0_downsample_1(x_residual)
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer4_1_conv1(x_main)
x_main = self.layer4_1_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer4_1_conv2(x_main)
x_main = self.layer4_1_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer4_1_conv3(x_main)
x_main = self.layer4_1_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x_main = x
x_main = self.layer4_2_conv1(x_main)
x_main = self.layer4_2_bn1(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer4_2_conv2(x_main)
x_main = self.layer4_2_bn2(x_main)
x_main = F.relu(x_main, inplace=True)
x_main = self.layer4_2_conv3(x_main)
x_main = self.layer4_2_bn3(x_main)
x_residual = x
x = F.relu(x_main + x_residual, inplace=True)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
def resnet50(**kwargs):
return ResNet50(**kwargs)
| [
"[email protected]"
] | |
ff01db056009a80fa1000e2954fbb76c769b6e7e | a3d2620bbf25002c7b182600c2e40f8f06555e91 | /exc/exc/wsgi.py | 8d7d6db299d15b0077bd2774bf300955b5612354 | [] | no_license | alejo8591/backend-lab | 782736a82933f705f825a1194369bfe13e86c0ec | 4a02a9552083a7c877e91b0f8b81e37a8650cf54 | refs/heads/master | 2016-09-03T03:53:43.878240 | 2015-11-26T06:35:38 | 2015-11-26T06:35:38 | 3,911,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | """
WSGI config for exc project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "exc.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| [
"[email protected]"
] | |
968290c1917596dac408fca7d1a91f4c18315524 | 3024cafafbfc75193105af7f225d3b12eb2aea46 | /DjangoProjects/project24/iplapp/models.py | b6932bc062b857864ce7ec33dc7f0cac6088b6d7 | [] | no_license | jaishankarg24/Django-Rest-Framework | 33266f6825d51abb8a512426baedf59f2ee957c8 | 809ee9208ffbef4202a8f4058a84f5322793af52 | refs/heads/master | 2023-03-02T20:56:38.051060 | 2021-02-12T05:37:48 | 2021-02-12T05:37:48 | 338,233,009 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 200 | py | from django.db import models
# Create your models here.
class IplTable(models.Model):
    name = models.CharField(max_length=50)
    age = models.IntegerField()
    country = models.CharField(max_length=50)
"[email protected]"
] | |
5c5ff093f8e4848fe2435494f5abccda014f4507 | 84a1f9d626828b6ecaee4ef037081f4d8750a990 | /编程/9月/9.12/习题答案.py | df9a5234a978fced165131300f75ac2e75628528 | [] | no_license | dujiaojingyu/Personal-programming-exercises | 5a8f001efa038a0cb3b6d0aa10e06ad2f933fe04 | 72a432c22b52cae3749e2c18cc4244bd5e831f64 | refs/heads/master | 2020-03-25T17:36:40.734446 | 2018-10-01T01:47:36 | 2018-10-01T01:47:36 | 143,986,099 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,921 | py | #coding=utf-8
import linecache
import time
now = time.time() #代码开始时间
# 前期准备,整理数据
data_keys = ('bid', 'uid', 'username', 'v_class', 'content', 'img', 'created_at', 'source', 'rt_num', 'cm_num', 'rt_uid', 'rt_username', 'rt_v_class', 'rt_content', 'rt_img', 'src_rt_num', 'src_cm_num', 'gender', 'rt_bid', 'location', 'rt_mid', 'mid', 'lat', 'lon', 'lbs_type', 'lbs_title', 'poiid', 'links', 'hashtags', 'ats', 'rt_links', 'rt_hashtags', 'rt_ats', 'v_url', 'rt_v_url')
keys = {data_keys[k]:k for k in xrange(0,len(data_keys))}
f = linecache.getlines('t.txt')
lines = [x[1:-1].split('","') for x in f] # split each record into its fields
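# e.g. line[keys['username']] looks up the username field of a parsed record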
#1 Output the total number of users
users = set([line[keys['username']] for line in lines])
user_total = len(set(users))
assert type(user_total) == int
#2 A list of every user's name
users = list(users)
assert type(users) == list
#3 How many tweets were published in November 2012
lines_from_2012_11 = filter(lambda line:line[keys['created_at']].startswith('2012-11'),lines)
lines_total_from_2012_11 = len(lines_from_2012_11)
assert type(lines_total_from_2012_11) == int
#
# #4 Which days does the data in this file cover?
#
# users_by_date = [line[keys['created_at']].split(' ')[0] for line in lines]
#
# lines_by_created = list(set(users_by_date))
#
# lines_by_created.sort()
#
# assert type(lines_by_created) == list
#
#
# #5 In which hour of the day were the most tweets published?
# # todo: converting times with the time module would be best; the example below only demonstrates string splitting
#
# hours = [int(line[keys['created_at']][11:13]) for line in lines]
#
# total_by_hour = [(h,hours.count(h)) for h in xrange(0,24) ]
#
# total_by_hour.sort(key=lambda k:k[1],reverse=True)
#
# max_hour = total_by_hour[0][0]
#
# assert type(max_hour) == int
#
#
# #6 For each day in the file, output the user who posted the most tweets
#
# dateline_by_user = {k:dict() for k in lines_by_created}
#
# for line in lines:
# dateline = line[keys['created_at']].split(' ')[0]
# username = line[keys['username']]
# if dateline_by_user[dateline].has_key(username):
# dateline_by_user[dateline][username] += 1
# else:
# dateline_by_user[dateline][username] = 1
#
# for k,v in dateline_by_user.items():
# us = v.items()
# us.sort(key=lambda k:k[1],reverse=True)
# dateline_by_user[k] = {us[0][0]:us[0][1]}
#
# assert type(dateline_by_user) == dict
#
#
# #7 Output, in chronological order, the hourly tweet frequency on 2012-11-03
#
# lines_from_2012_11_03 = filter(lambda line:line[keys['created_at']].startswith('2012-11-03'),lines)
#
# hourlines_from_2012_11_03 = {str(i):0 for i in xrange(0,24)}
#
# for line in lines_from_2012_11_03:
# hour = line[keys['created_at']][11:13]
# hourlines_from_2012_11_03[str(int(hour))] += 1
#
# hour_timeline_from_2012_11_03 = [(k,v) for k,v in hourlines_from_2012_11_03.items()]
# hour_timeline_from_2012_11_03.sort(key=lambda k:int(k[0]))
#
# assert type(hour_timeline_from_2012_11_03) == list
#
#
# #8 Count the tweet sources in the file and how often each appears
#
# source = set([k[keys['source']] for k in lines])
# source_dict = {s:0 for s in source}
# for line in lines:
# source_name = line[keys['source']]
# source_dict[source_name] += 1
# source_list = [(k,v) for k,v in source_dict.items()]
# source_list.sort(key=lambda k:k[1],reverse=True)
# assert type(source_list) == list
#
#
# #9 Count how many retweet URLs start with "https://twitter.com/umiushi_no_uta"
#
# umi_total = 0
# for line in lines:
# if line[keys['rt_v_url']].startswith('https://twitter.com/umiushi_no_uta'):
# umi_total += 1
# assert type(umi_total) == int
#
#
# #10 How many tweets did the user with UID 573638104 post
#
# tweets_total_from_573638104 = 0
# for line in lines:
# if line[keys['uid']] == '573638104' :
# tweets_total_from_573638104 += 1
# assert type(tweets_total_from_573638104) == int
#
#
# #11 Define a function that accepts any number of user uid arguments (returning "null" if none exist); the function returns the uid of the user who posted the most tweets.
#
# def get_user_by_max_tweets(*uids):
#
# '''
# @deprecated: arguments may be strings or numbers
# '''
#
# if len(uids) > 0:
# uids = filter(lambda u:type(u) == int or u.isdigit(),uids)
# uids = map(str,uids)
# if len(uids) > 0:
# uids_dict = {x:0 for x in uids}
# for line in lines:
# uid = line[keys['uid']]
# if uid in uids:
# uids_dict[uid] += 1
# uids_and_tweets_total = [(x,y) for x,y in uids_dict.items()]
# uids_and_tweets_total.sort(key=lambda k:k[1],reverse=True)
# return uids_and_tweets_total[0][0]
# return "null"
#
#
# assert get_user_by_max_tweets() == 'null'
# assert get_user_by_max_tweets('ab','cds') == 'null'
# assert get_user_by_max_tweets('ab','cds','123b') == 'null'
# assert get_user_by_max_tweets('12342','cd') == '12342'
# assert get_user_by_max_tweets('28803555',28803555) == '28803555'
# assert get_user_by_max_tweets('28803555',28803555,'96165754') == '28803555'
#
#
# #12 Who posted the tweet with the longest content in the file
#
# lines_by_content_length = [(line[keys['username']],len(line[keys['content']])) for line in lines]
# lines_by_content_length.sort(key=lambda k:k[1],reverse=True)
# user_by_max_content = lines_by_content_length[0][0]
# # todo: what if several users tie for the most?
# assert type(user_by_max_content) == str
#
#
# #13 Who retweeted the most URLs in the file
#
# lines_by_rt = [(line[keys['uid']],int(line[keys['rt_num']])) for line in lines if line[keys['rt_num']] != '']
# lines_by_rt.sort(key=lambda k:k[1],reverse=True)
# user_by_max_rt = lines_by_rt[0][0]
# assert type(user_by_max_rt) == str
#
#
# #14 Who posted the most tweets during the 11 o'clock hour in the file.
#
# lines_on_hour11 = filter(lambda line:line[keys['created_at']].startswith('11',11,13),lines)
# lines_by_uid_on_hour11 = {k[keys['uid']]:0 for k in lines_on_hour11}
# for line in lines_on_hour11:
# uid = line[keys['uid']]
# lines_by_uid_on_hour11[uid] += 1
# d = [(k,v) for k,v in lines_by_uid_on_hour11.items()]
# d.sort(key=lambda k:k[1],reverse=True)
# uid_by_max_tweets_on_hour11 = d[0][0]
# # todo: what if several users tie for the most?
# assert type(uid_by_max_tweets_on_hour11) == str
#
#
# #15 Which user appears most often with a source tweet URL in the file. (Requirement: output the user's uid as a string.)
#
# uid_by_v_url = {k[keys['uid']]:0 for k in lines}
# for line in lines:
# uid = line[keys['uid']]
# if lines[keys['v_url']] != '':
# uid_by_v_url[uid] += 1
# uid_sort_by_v_url = [(k,v) for k,v in uid_by_v_url.items()]
# uid_sort_by_v_url.sort(key=lambda k:k[1],reverse=True)
# uid_by_max_v_url = uid_sort_by_v_url[0][0]
# # todo: what if several users tie for the most?
# assert type(uid_by_max_v_url) == str
#
# print 'Elapsed time: %s'%(time.time() - now) # total running time
| [
"[email protected]"
] | |
95aa037242063b122b3bd33f7bb1314f54c46850 | 11ad104b0309a2bffd7537d05e2ab3eaf4aed0ca | /tests/helpers/test_storage_remove.py | 9a447771ea630816f159fba84f8ff655f447eb56 | [
"Apache-2.0"
] | permissive | koying/home-assistant | 15e5d01a45fd4373b3d286e1b2ca5aba1311786d | 9fc92ab04e0d1933cc23e89b4095714aee725f8b | refs/heads/dev | 2023-06-24T01:15:12.150720 | 2020-11-01T12:27:33 | 2020-11-01T12:27:33 | 189,232,923 | 2 | 1 | Apache-2.0 | 2023-01-13T06:04:15 | 2019-05-29T13:39:02 | Python | UTF-8 | Python | false | false | 1,252 | py | """Tests for the storage helper with minimal mocking."""
import asyncio
from datetime import timedelta
import os
from homeassistant.helpers import storage
from homeassistant.util import dt
from tests.async_mock import patch
from tests.common import async_fire_time_changed, async_test_home_assistant
async def test_removing_while_delay_in_progress(tmpdir):
"""Test removing while delay in progress."""
loop = asyncio.get_event_loop()
hass = await async_test_home_assistant(loop)
test_dir = await hass.async_add_executor_job(tmpdir.mkdir, "storage")
with patch.object(storage, "STORAGE_DIR", test_dir):
real_store = storage.Store(hass, 1, "remove_me")
await real_store.async_save({"delay": "no"})
assert await hass.async_add_executor_job(os.path.exists, real_store.path)
real_store.async_delay_save(lambda: {"delay": "yes"}, 1)
await real_store.async_remove()
assert not await hass.async_add_executor_job(os.path.exists, real_store.path)
async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=1))
await hass.async_block_till_done()
assert not await hass.async_add_executor_job(os.path.exists, real_store.path)
await hass.async_stop()
| [
"[email protected]"
] | |
34906a49299704ce8c70279a90752f8f06fab619 | 7c8bd2e26fdabf1555e0150272ecf035f6c21bbd | /ps프로젝트/BS/숫자카드2.py | 8734a278232da1fa846614d424d7f3945e467c48 | [] | no_license | hyeokjinson/algorithm | 44090c2895763a0c53d48ff4084a96bdfc77f953 | 46c04e0f583d4c6ec4f51a24f19a373b173b3d5c | refs/heads/master | 2021-07-21T10:18:43.918149 | 2021-03-27T12:27:56 | 2021-03-27T12:27:56 | 245,392,582 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | from collections import Counter
def check(v):
lt=0
rt=n-1
cnt=0
while lt<=rt:
mid=(lt+rt)//2
if arr[mid]==v:
return 1
elif arr[mid]>v:
rt=mid-1
else:
lt=mid+1
return 0
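# Illustrative note (not part of the original solution): with arr = [2, 3, 7, 10]
# sorted ascending, check(7) returns 1 and check(5) returns 0.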
if __name__ == '__main__':
n=int(input())
arr=list(map(int,input().split()))
arr.sort()
m=int(input())
arr1=list(map(int,input().split()))
c=Counter(arr)
res=[]
for i in range(m):
if check(arr1[i]):
res.append(c[arr1[i]])
else:
res.append(0)
for x in res:
print(x,end=' ') | [
"[email protected]"
] | |
9d7d2d581d50ca04cf1b4329b5b87bf803707862 | c2e6b6119a1d03bc293572d568d21a6b76762a1f | /ex.py | 30c1077d8fe6fac7ee1c285147c7a62bef2ee59a | [] | no_license | kafura-kafiri/Fesss | 24a92e5185881066b0d2f61d1649ab0e43a0f479 | 7b660723237dfbdbd3ba9772a9d2a9c771807bb7 | refs/heads/master | 2021-05-03T17:17:54.799918 | 2018-02-06T16:06:40 | 2018-02-06T16:06:40 | 120,443,736 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,280 | py | # LSTM for international airline passengers problem with regression framing
import numpy
from pandas import read_csv
import datetime
import math
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
# fix random seed for reproducibility
numpy.random.seed(7)
# load the dataset
def parse(x):
return datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
dataframe = read_csv('fesss.csv', parse_dates=['Date'], engine='python', date_parser=parse)
dataset = dataframe.values
start = dataset[0, 0]
for i in range(len(dataset)):
_start = dataset[i, 0]
dataset[i, 0] = (dataset[i, 0] - start).total_seconds()
start = _start
dataset = dataset.astype('float32')
# normalize the dataset
delta_scaler = MinMaxScaler(feature_range=(0, 1))
delay_scaler = MinMaxScaler(feature_range=(0, 1))
# print(dataset)
def scale(scaler, dataset, i):
data = dataset[:, i]
data = data.reshape(data.shape[0], 1)
data = scaler.fit_transform(data)
dataset[:, i] = data.reshape(data.shape[0])
return dataset
dataset = scale(delta_scaler, dataset, 0)
dataset = scale(delay_scaler, dataset, 1)
# convert an array of values into a dataset matrix
def create_dataset(dataset, look_back=1):
dataX, dataY = [], []
for i in range(len(dataset) - look_back):
l = [dataset[i + 1][0]]
l.extend(dataset[i:(i + look_back), 1])
l.append(dataset[i + 1][2])
dataX.append(l)
dataY.append(dataset[i + look_back, 0])
return numpy.array(dataX), numpy.array(dataY)
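# Note (illustrative, not part of the original script): each dataX row packs
# the next step's scaled time delta (column 0), the previous `look_back` delay
# values (column 1), and the next step's third column; dataY holds the scaled
# delta at index i + look_back.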
look_back = 1
dataX, dataY = create_dataset(dataset, look_back)
# reshape input to be [samples, time steps, features]
dataX = numpy.reshape(dataX, (dataX.shape[0], 1, dataX.shape[1]))
print(dataset)
print(dataX)
print(dataY)
# create and fit the LSTM network
model = Sequential()
model.add(LSTM(4, input_shape=(1, look_back + 2)))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(dataX, dataY, epochs=100, batch_size=1, verbose=2)
# make predictions
trainPredict = model.predict(dataX)
from math import sqrt
rmse = sqrt(mean_squared_error(dataY, trainPredict))
print('RMSE: %.3f' % rmse)
| [
"[email protected]"
] | |
8ef2d2abe68d0b5499e760395b40896a467518c4 | 2e9193625039cbd93a76a1ac1115e84599c6afcd | /HashTable/hashtableImp.py | 1f19d4d3fcdd4ca486866e38beb7dbb1a273fa65 | [] | no_license | hieudx149/DatastructAndAlgorithms | d54b79c3375dfb17989160a1d2dc74505061eae5 | a5f147b2f644f2a273c50756c9d297fa8b6bcd08 | refs/heads/master | 2023-06-16T13:38:32.039274 | 2021-07-13T10:35:54 | 2021-07-13T10:35:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,581 | py | class hash_table:
def __init__(self, size):
self.size = size
self.data = [None]*self.size
def __str__(self): # As in the array implementation, this method is used to print the attributes of the class object in a dictionary format
return str(self.__dict__)
def _hash(self, key):
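        # fold each character code, weighted by its position, into the range [0, size)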
hash = 0
for i in range(len(key)):
hash = (hash + ord(key[i])*i) % self.size
return hash
def set(self, key, value):
address = self._hash(key)
if not self.data[address]:
self.data[address] = [[key, value]]
else:
self.data[address].append([key, value])
print(self.data)
def get(self, key):
address = self._hash(key)
bucket = self.data[address]
if bucket:
for i in range(len(bucket)):
if bucket[i][0] == key:
return bucket[i][1]
return None
def keys(self):
list_key = []
for i in range(self.size):
if self.data[i]:
for j in range(len(self.data[i])):
list_key.append(self.data[i][j][0])
return list_key
def values(self):
list_value = []
for i in range(self.size):
if self.data[i]:
for j in range(len(self.data[i])):
list_value.append(self.data[i][j][1])
return list_value
new_hash = hash_table(5)
new_hash.set('duong', 100)
new_hash.set('xuan', 200)
new_hash.set('hieu', 300)
print(new_hash.keys())
print(new_hash.values()) | [
"[email protected]"
] | |
43b720aa6186d5142bf19c70b95377a6e09392e7 | 5b4312ddc24f29538dce0444b7be81e17191c005 | /autoware.ai/1.12.0_cuda/build/memsic_imu/catkin_generated/generate_cached_setup.py | a7913516ce6d717d1e4e1c8927d7d0035dfa6bef | [
"MIT"
] | permissive | muyangren907/autoware | b842f1aeb2bfe7913fb2be002ea4fc426b4e9be2 | 5ae70f0cdaf5fc70b91cd727cf5b5f90bc399d38 | refs/heads/master | 2020-09-22T13:08:14.237380 | 2019-12-03T07:12:49 | 2019-12-03T07:12:49 | 225,167,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,360 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import os
import stat
import sys
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/melodic/share/catkin/cmake', 'catkinConfig.cmake.in')):
sys.path.insert(0, os.path.join('/opt/ros/melodic/share/catkin/cmake', '..', 'python'))
try:
from catkin.environment_cache import generate_environment_script
except ImportError:
# search for catkin package in all workspaces and prepend to path
for workspace in "/opt/ros/melodic".split(';'):
python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
if os.path.isdir(os.path.join(python_path, 'catkin')):
sys.path.insert(0, python_path)
break
from catkin.environment_cache import generate_environment_script
code = generate_environment_script('/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/build/memsic_imu/devel/env.sh')
output_filename = '/home/muyangren907/autoware/autoware.ai/1.12.0_cuda/build/memsic_imu/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
#print('Generate script for cached setup "%s"' % output_filename)
f.write('\n'.join(code))
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
| [
"[email protected]"
] | |
3dd7a6c1cc0e7b493acc79ecedfa610981f4a0c2 | 6e8d58340f2be5f00d55e2629052c0bbc9dcf390 | /eggs/numpy-1.6.0-py2.7-linux-x86_64-ucs4.egg/numpy/f2py/auxfuncs.py | a12d92b7ea6a8df62af61ded3a2fcb333b26d37c | [
"CC-BY-2.5",
"MIT"
] | permissive | JCVI-Cloud/galaxy-tools-prok | e57389750d33ac766e1658838cdb0aaf9a59c106 | 3c44ecaf4b2e1f2d7269eabef19cbd2e88b3a99c | refs/heads/master | 2021-05-02T06:23:05.414371 | 2014-03-21T18:12:43 | 2014-03-21T18:12:43 | 6,092,693 | 0 | 2 | NOASSERTION | 2020-07-25T20:38:17 | 2012-10-05T15:57:38 | Python | UTF-8 | Python | false | false | 19,936 | py | #!/usr/bin/env python
"""
Auxiliary functions for f2py2e.
Copyright 1999,2000 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) LICENSE.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/07/24 19:01:55 $
Pearu Peterson
"""
__version__ = "$Revision: 1.65 $"[10:-1]
import __version__
f2py_version = __version__.version
import pprint
import sys
import types
import cfuncs
errmess=sys.stderr.write
#outmess=sys.stdout.write
show=pprint.pprint
options={}
debugoptions=[]
wrapfuncs = 1
if sys.version_info[0] >= 3:
from functools import reduce
def outmess(t):
if options.get('verbose',1):
sys.stdout.write(t)
def debugcapi(var):
return 'capi' in debugoptions
def _isstring(var):
return 'typespec' in var and var['typespec']=='character' and (not isexternal(var))
def isstring(var):
return _isstring(var) and not isarray(var)
def ischaracter(var):
return isstring(var) and 'charselector' not in var
def isstringarray(var):
return isarray(var) and _isstring(var)
def isarrayofstrings(var):
# leaving out '*' for now so that
# `character*(*) a(m)` and `character a(m,*)`
# are treated differently. Luckily `character**` is illegal.
return isstringarray(var) and var['dimension'][-1]=='(*)'
def isarray(var):
return 'dimension' in var and (not isexternal(var))
def isscalar(var):
return not (isarray(var) or isstring(var) or isexternal(var))
def iscomplex(var):
return isscalar(var) and var.get('typespec') in ['complex','double complex']
def islogical(var):
return isscalar(var) and var.get('typespec')=='logical'
def isinteger(var):
return isscalar(var) and var.get('typespec')=='integer'
def isreal(var):
return isscalar(var) and var.get('typespec')=='real'
def get_kind(var):
try:
return var['kindselector']['*']
except KeyError:
try:
return var['kindselector']['kind']
except KeyError:
pass
def islong_long(var):
if not isscalar(var):
return 0
if var.get('typespec') not in ['integer','logical']:
return 0
return get_kind(var)=='8'
def isunsigned_char(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-1'
def isunsigned_short(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-2'
def isunsigned(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-4'
def isunsigned_long_long(var):
if not isscalar(var):
return 0
if var.get('typespec') != 'integer':
return 0
return get_kind(var)=='-8'
def isdouble(var):
if not isscalar(var):
return 0
if not var.get('typespec')=='real':
return 0
return get_kind(var)=='8'
def islong_double(var):
if not isscalar(var):
return 0
if not var.get('typespec')=='real':
return 0
return get_kind(var)=='16'
def islong_complex(var):
if not iscomplex(var):
return 0
return get_kind(var)=='32'
def iscomplexarray(var):
return isarray(var) and var.get('typespec') in ['complex','double complex']
def isint1array(var):
return isarray(var) and var.get('typespec')=='integer' \
and get_kind(var)=='1'
def isunsigned_chararray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-1'
def isunsigned_shortarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-2'
def isunsignedarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-4'
def isunsigned_long_longarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='-8'
def issigned_chararray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='1'
def issigned_shortarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='2'
def issigned_array(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='4'
def issigned_long_longarray(var):
return isarray(var) and var.get('typespec') in ['integer', 'logical']\
and get_kind(var)=='8'
def isallocatable(var):
return 'attrspec' in var and 'allocatable' in var['attrspec']
def ismutable(var):
return not (not 'dimension' in var or isstring(var))
def ismoduleroutine(rout):
return 'modulename' in rout
def ismodule(rout):
return ('block' in rout and 'module'==rout['block'])
def isfunction(rout):
return ('block' in rout and 'function'==rout['block'])
#def isfunction_wrap(rout):
# return wrapfuncs and (iscomplexfunction(rout) or isstringfunction(rout)) and (not isexternal(rout))
def isfunction_wrap(rout):
if isintent_c(rout):
return 0
return wrapfuncs and isfunction(rout) and (not isexternal(rout))
def issubroutine(rout):
return ('block' in rout and 'subroutine'==rout['block'])
def issubroutine_wrap(rout):
if isintent_c(rout):
return 0
return issubroutine(rout) and hasassumedshape(rout)
def hasassumedshape(rout):
if rout.get('hasassumedshape'):
return True
for a in rout['args']:
for d in rout['vars'].get(a,{}).get('dimension',[]):
if d==':':
rout['hasassumedshape'] = True
return True
return False
def isroutine(rout):
return isfunction(rout) or issubroutine(rout)
def islogicalfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return islogical(rout['vars'][a])
return 0
def islong_longfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return islong_long(rout['vars'][a])
return 0
def islong_doublefunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return islong_double(rout['vars'][a])
return 0
def iscomplexfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return iscomplex(rout['vars'][a])
return 0
def iscomplexfunction_warn(rout):
if iscomplexfunction(rout):
outmess("""\
**************************************************************
Warning: code with a function returning complex value
may not work correctly with your Fortran compiler.
Run the following test before using it in your applications:
$(f2py install dir)/test-site/{b/runme_scalar,e/runme}
When using GNU gcc/g77 compilers, codes should work correctly.
**************************************************************\n""")
return 1
return 0
def isstringfunction(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return isstring(rout['vars'][a])
return 0
def hasexternals(rout):
return 'externals' in rout and rout['externals']
def isthreadsafe(rout):
return 'f2pyenhancements' in rout and 'threadsafe' in rout['f2pyenhancements']
def hasvariables(rout):
return 'vars' in rout and rout['vars']
def isoptional(var):
return ('attrspec' in var and 'optional' in var['attrspec'] and 'required' not in var['attrspec']) and isintent_nothide(var)
def isexternal(var):
return ('attrspec' in var and 'external' in var['attrspec'])
def isrequired(var):
return not isoptional(var) and isintent_nothide(var)
def isintent_in(var):
if 'intent' not in var:
return 1
if 'hide' in var['intent']:
return 0
if 'inplace' in var['intent']:
return 0
if 'in' in var['intent']:
return 1
if 'out' in var['intent']:
return 0
if 'inout' in var['intent']:
return 0
if 'outin' in var['intent']:
return 0
return 1
def isintent_inout(var):
return 'intent' in var and ('inout' in var['intent'] or 'outin' in var['intent']) and 'in' not in var['intent'] and 'hide' not in var['intent'] and 'inplace' not in var['intent']
def isintent_out(var):
return 'out' in var.get('intent',[])
def isintent_hide(var):
return ('intent' in var and ('hide' in var['intent'] or ('out' in var['intent'] and 'in' not in var['intent'] and (not l_or(isintent_inout,isintent_inplace)(var)))))
def isintent_nothide(var):
return not isintent_hide(var)
def isintent_c(var):
return 'c' in var.get('intent',[])
# def isintent_f(var):
# return not isintent_c(var)
def isintent_cache(var):
return 'cache' in var.get('intent',[])
def isintent_copy(var):
return 'copy' in var.get('intent',[])
def isintent_overwrite(var):
return 'overwrite' in var.get('intent',[])
def isintent_callback(var):
return 'callback' in var.get('intent',[])
def isintent_inplace(var):
return 'inplace' in var.get('intent',[])
def isintent_aux(var):
return 'aux' in var.get('intent',[])
def isintent_aligned4(var):
return 'aligned4' in var.get('intent',[])
def isintent_aligned8(var):
return 'aligned8' in var.get('intent',[])
def isintent_aligned16(var):
return 'aligned16' in var.get('intent',[])
isintent_dict = {isintent_in:'INTENT_IN',isintent_inout:'INTENT_INOUT',
isintent_out:'INTENT_OUT',isintent_hide:'INTENT_HIDE',
isintent_cache:'INTENT_CACHE',
isintent_c:'INTENT_C',isoptional:'OPTIONAL',
isintent_inplace:'INTENT_INPLACE',
isintent_aligned4:'INTENT_ALIGNED4',
isintent_aligned8:'INTENT_ALIGNED8',
isintent_aligned16:'INTENT_ALIGNED16',
}
def isprivate(var):
return 'attrspec' in var and 'private' in var['attrspec']
def hasinitvalue(var):
return '=' in var
def hasinitvalueasstring(var):
if not hasinitvalue(var):
return 0
return var['='][0] in ['"',"'"]
def hasnote(var):
return 'note' in var
def hasresultnote(rout):
if not isfunction(rout):
return 0
if 'result' in rout:
a=rout['result']
else:
a=rout['name']
if a in rout['vars']:
return hasnote(rout['vars'][a])
return 0
def hascommon(rout):
return 'common' in rout
def containscommon(rout):
if hascommon(rout):
return 1
if hasbody(rout):
for b in rout['body']:
if containscommon(b):
return 1
return 0
def containsmodule(block):
if ismodule(block):
return 1
if not hasbody(block):
return 0
for b in block['body']:
if containsmodule(b):
return 1
return 0
def hasbody(rout):
return 'body' in rout
def hascallstatement(rout):
return getcallstatement(rout) is not None
def istrue(var):
return 1
def isfalse(var):
return 0
class F2PYError(Exception):
pass
class throw_error:
def __init__(self,mess):
self.mess = mess
def __call__(self,var):
mess = '\n\n var = %s\n Message: %s\n' % (var,self.mess)
raise F2PYError,mess
def l_and(*f):
l,l2='lambda v',[]
for i in range(len(f)):
l='%s,f%d=f[%d]'%(l,i,i)
l2.append('f%d(v)'%(i))
return eval('%s:%s'%(l,' and '.join(l2)))
def l_or(*f):
l,l2='lambda v',[]
for i in range(len(f)):
l='%s,f%d=f[%d]'%(l,i,i)
l2.append('f%d(v)'%(i))
return eval('%s:%s'%(l,' or '.join(l2)))
def l_not(f):
return eval('lambda v,f=f:not f(v)')
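# Illustrative usage note (added; not part of the original f2py source):
# l_and/l_or/l_not compose the predicate helpers above into a single
# callable, keeping each f_i bound through default arguments. For example,
#   l_and(isintent_c, l_or(isscalar, iscomplex))(var)
# is true when var is intent(c) and either scalar or complex, which is
# exactly the combination used in getcallprotoargument() below.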
def isdummyroutine(rout):
try:
return rout['f2pyenhancements']['fortranname']==''
except KeyError:
return 0
def getfortranname(rout):
try:
name = rout['f2pyenhancements']['fortranname']
if name=='':
raise KeyError
if not name:
errmess('Failed to use fortranname from %s\n'%(rout['f2pyenhancements']))
raise KeyError
except KeyError:
name = rout['name']
return name
def getmultilineblock(rout,blockname,comment=1,counter=0):
try:
r = rout['f2pyenhancements'].get(blockname)
except KeyError:
return
if not r: return
if counter>0 and type(r) is type(''):
return
if type(r) is type([]):
if counter>=len(r): return
r = r[counter]
if r[:3]=="'''":
if comment:
r = '\t/* start ' + blockname + ' multiline ('+`counter`+') */\n' + r[3:]
else:
r = r[3:]
if r[-3:]=="'''":
if comment:
r = r[:-3] + '\n\t/* end multiline ('+`counter`+')*/'
else:
r = r[:-3]
else:
errmess("%s multiline block should end with `'''`: %s\n" \
% (blockname,repr(r)))
return r
def getcallstatement(rout):
return getmultilineblock(rout,'callstatement')
def getcallprotoargument(rout,cb_map={}):
r = getmultilineblock(rout,'callprotoargument',comment=0)
if r: return r
if hascallstatement(rout):
outmess('warning: callstatement is defined without callprotoargument\n')
return
from capi_maps import getctype
arg_types,arg_types2 = [],[]
if l_and(isstringfunction,l_not(isfunction_wrap))(rout):
arg_types.extend(['char*','size_t'])
for n in rout['args']:
var = rout['vars'][n]
if isintent_callback(var):
continue
if n in cb_map:
ctype = cb_map[n]+'_typedef'
else:
ctype = getctype(var)
if l_and(isintent_c,l_or(isscalar,iscomplex))(var):
pass
elif isstring(var):
pass
#ctype = 'void*'
else:
ctype = ctype+'*'
if isstring(var) or isarrayofstrings(var):
arg_types2.append('size_t')
arg_types.append(ctype)
proto_args = ','.join(arg_types+arg_types2)
if not proto_args:
proto_args = 'void'
#print proto_args
return proto_args
def getusercode(rout):
return getmultilineblock(rout,'usercode')
def getusercode1(rout):
return getmultilineblock(rout,'usercode',counter=1)
def getpymethoddef(rout):
return getmultilineblock(rout,'pymethoddef')
def getargs(rout):
sortargs,args=[],[]
if 'args' in rout:
args=rout['args']
if 'sortvars' in rout:
for a in rout['sortvars']:
if a in args: sortargs.append(a)
for a in args:
if a not in sortargs:
sortargs.append(a)
else: sortargs=rout['args']
return args,sortargs
def getargs2(rout):
sortargs,args=[],rout.get('args',[])
auxvars = [a for a in rout['vars'].keys() if isintent_aux(rout['vars'][a])\
and a not in args]
args = auxvars + args
if 'sortvars' in rout:
for a in rout['sortvars']:
if a in args: sortargs.append(a)
for a in args:
if a not in sortargs:
sortargs.append(a)
else: sortargs=auxvars + rout['args']
return args,sortargs
def getrestdoc(rout):
if 'f2pymultilines' not in rout:
return None
k = None
if rout['block']=='python module':
k = rout['block'],rout['name']
return rout['f2pymultilines'].get(k,None)
def gentitle(name):
l=(80-len(name)-6)//2
return '/*%s %s %s*/'%(l*'*',name,l*'*')
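# Example (illustrative, added): gentitle('foo') returns '/*' + 35*'*' +
# ' foo ' + 35*'*' + '*/', i.e. a comment banner padded to roughly 80 columns.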
def flatlist(l):
if type(l)==types.ListType:
return reduce(lambda x,y,f=flatlist:x+f(y),l,[])
return [l]
def stripcomma(s):
if s and s[-1]==',': return s[:-1]
return s
def replace(str,d,defaultsep=''):
if type(d)==types.ListType:
return map(lambda d,f=replace,sep=defaultsep,s=str:f(s,d,sep),d)
if type(str)==types.ListType:
return map(lambda s,f=replace,sep=defaultsep,d=d:f(s,d,sep),str)
for k in 2*d.keys():
if k=='separatorsfor':
continue
if 'separatorsfor' in d and k in d['separatorsfor']:
sep=d['separatorsfor'][k]
else:
sep=defaultsep
if type(d[k])==types.ListType:
str=str.replace('#%s#'%(k),sep.join(flatlist(d[k])))
else:
str=str.replace('#%s#'%(k),d[k])
return str
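# Illustrative example (added): replace() substitutes '#key#' placeholders,
# so replace('#name#_capi', {'name': 'foo'}) returns 'foo_capi'. List values
# are joined with d['separatorsfor'][key] (or defaultsep) before substitution,
# and the key loop runs twice, apparently so that substituted text may itself
# contain placeholders that get resolved on the second pass.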
def dictappend(rd,ar):
if type(ar)==types.ListType:
for a in ar:
rd=dictappend(rd,a)
return rd
for k in ar.keys():
if k[0]=='_':
continue
if k in rd:
if type(rd[k])==str:
rd[k]=[rd[k]]
if type(rd[k])==types.ListType:
if type(ar[k])==types.ListType:
rd[k]=rd[k]+ar[k]
else:
rd[k].append(ar[k])
elif type(rd[k])==types.DictType:
if type(ar[k])==types.DictType:
if k=='separatorsfor':
for k1 in ar[k].keys():
if k1 not in rd[k]:
rd[k][k1]=ar[k][k1]
else:
rd[k]=dictappend(rd[k],ar[k])
else:
rd[k]=ar[k]
return rd
def applyrules(rules,d,var={}):
ret={}
if type(rules)==types.ListType:
for r in rules:
rr=applyrules(r,d,var)
ret=dictappend(ret,rr)
if '_break' in rr:
break
return ret
if '_check' in rules and (not rules['_check'](var)):
return ret
if 'need' in rules:
res = applyrules({'needs':rules['need']},d,var)
if 'needs' in res:
cfuncs.append_needs(res['needs'])
for k in rules.keys():
if k=='separatorsfor':
ret[k]=rules[k]; continue
if type(rules[k])==str:
ret[k]=replace(rules[k],d)
elif type(rules[k])==types.ListType:
ret[k]=[]
for i in rules[k]:
ar=applyrules({k:i},d,var)
if k in ar:
ret[k].append(ar[k])
elif k[0]=='_':
continue
elif type(rules[k])==types.DictType:
ret[k]=[]
for k1 in rules[k].keys():
if type(k1)==types.FunctionType and k1(var):
if type(rules[k][k1])==types.ListType:
for i in rules[k][k1]:
if type(i)==types.DictType:
res=applyrules({'supertext':i},d,var)
if 'supertext' in res:
i=res['supertext']
else: i=''
ret[k].append(replace(i,d))
else:
i=rules[k][k1]
if type(i)==types.DictType:
res=applyrules({'supertext':i},d)
if 'supertext' in res:
i=res['supertext']
else: i=''
ret[k].append(replace(i,d))
else:
errmess('applyrules: ignoring rule %s.\n'%`rules[k]`)
if type(ret[k])==types.ListType:
if len(ret[k])==1:
ret[k]=ret[k][0]
if ret[k]==[]:
del ret[k]
return ret
| [
"[email protected]"
] | |
5cbcaaa43ef258823c6c27044d41b401cda0c79d | 6b301b0b0d5fea69e6ab6d3fcfd0a9741143a9b7 | /config/jupyter/.ipython/profile_default/startup/00-setup-spark.py | 0219daccbe9e74cbcbd99ab8d59a1f0b6a772a72 | [
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-other-permissive",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | frankiegu/pipeline | c7a166e80ccc6a351c32fb1918a41268f2380140 | 3526f58cc9b4d824a23300cd60c647a753902774 | refs/heads/master | 2021-01-11T06:09:36.914324 | 2016-10-03T05:33:41 | 2016-10-03T05:33:41 | 69,836,618 | 1 | 0 | null | 2016-10-03T02:56:09 | 2016-10-03T02:56:09 | null | UTF-8 | Python | false | false | 489 | py | import glob
import os
import sys
# Setup SPARK_HOME
spark_home = os.getenv('SPARK_HOME', None)
if not spark_home:
raise ValueError('SPARK_HOME environment variable is not set')
# System sys.path
sys.path.insert(0, os.path.join(spark_home, 'python'))
for lib in glob.glob(os.path.join(spark_home, 'python/lib/py4j-*-src.zip')):
sys.path.insert(0, lib)
os.environ['PYSPARK_SUBMIT_ARGS']='--master %s %s pyspark-shell' % (os.getenv('SPARK_MASTER'), os.getenv('SPARK_SUBMIT_ARGS'))
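# Illustrative example (assumed environment values, not part of the original
# file): with SPARK_MASTER='local[2]' and SPARK_SUBMIT_ARGS='--driver-memory 2g'
# the line above produces
#   PYSPARK_SUBMIT_ARGS='--master local[2] --driver-memory 2g pyspark-shell'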
| [
"[email protected]"
] | |
bc4d8fdf44a8f6da59b0a8ead9eefac7907e6a29 | b3455474da0bc27c913ff88908be0d0bddba352d | /5.AI/1.Machine Learning/196_mushroom_train2.py | 0919272787d2e7922608902f2ded949c86259dab | [] | no_license | rntva/JumpToPython | 7286bc94e40b553fa7b9fbca7934f2e35f63b54e | 090f0ed5bf28ae7832e5edde11936b71b4fb324b | refs/heads/master | 2021-05-01T02:33:44.528975 | 2018-07-18T08:24:07 | 2018-07-18T08:24:07 | 121,182,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,230 | py | import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn import metrics
from sklearn.model_selection import train_test_split
# read the data
mr = pd.read_csv("mushroom.csv", header=None)
# expand the categorical variables in the data
label = []
data = []
attr_list = []
for row_index, row in mr.iterrows() :
label.append(row.ix[0])
exdata = []
for col, v in enumerate(row.ix[1:]) :
if row_index == 0 :
attr = {"dic" : {}, "cnt" : 0}
attr_list.append(attr)
else :
attr = attr_list[col]
        # encode each mushroom attribute symbol as a one-hot array
d = [0,0,0,0,0,0,0,0,0,0,0,0]
if v in attr["dic"] : idx = attr["dic"][v]
else :
idx = attr["cnt"]
attr["dic"][v] = idx
attr["cnt"] += 1
d[idx] = 1
exdata += d
data.append(exdata)
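# Note (added): each attribute symbol gets a fixed slot in a 12-element
# one-hot vector: the first distinct symbol seen in a column maps to
# [1,0,0,...], the second to [0,1,0,...], and exdata concatenates these
# vectors across all attribute columns of the row.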
# split into training and test data
data_train, data_test, label_train, label_test = train_test_split(data, label)
# train the classifier
clf = RandomForestClassifier()
clf.fit(data_train, label_train)
# predict on the test data
pre = clf.predict(data_test)
# evaluate the result
ac_score = metrics.accuracy_score(label_test, pre)
print("Accuracy:", ac_score) | [
"[email protected]"
] | |
fedb6ed76a5d7115dd820e753d6a9561b86a1f9e | 36e27ca74b734994fb2e5cd4e328e7b82202d8cd | /nodarb/migrations/0007_nodarb_tips_rada.py | 23417ec23dc96ae31da304e4df5cc8abde817eeb | [] | no_license | svabis/vf | 5e9513f3a767a9561e2fb8bd3e37bb3c03d113dd | d83a4afd177e4f7007a9ce824ae5ed36f18654fc | refs/heads/master | 2020-05-21T21:19:59.952463 | 2018-06-04T11:11:50 | 2018-06-04T11:11:50 | 84,647,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('nodarb', '0006_auto_20170311_1644'),
]
operations = [
migrations.AddField(
model_name='nodarb_tips',
name='rada',
field=models.BooleanField(default=True),
),
]
| [
"[email protected]"
] | |
a891b7dbf6d6642a5556df699356d8e6d45ea81e | 9eef031728a6cdcd681cad9ba6b0709269383905 | /examples/test/test_analyzer.py | bd467f4878203aa3e45a31a9040cd5ead57b0c12 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | kbeckmann/liteeth | 906b6f30b5d3be28f2bfac91704c7d5ddf26e85e | 54acf9fd76c226d7760294ffde86418e52e0951b | refs/heads/master | 2022-12-24T17:02:42.834415 | 2020-08-24T20:14:35 | 2020-08-24T20:14:35 | 300,029,015 | 0 | 0 | NOASSERTION | 2020-09-30T19:03:51 | 2020-09-30T19:03:50 | null | UTF-8 | Python | false | false | 570 | py | #!/usr/bin/env python3
#
# This file is part of LiteEth.
#
# Copyright (c) 2015-2018 Florent Kermarrec <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause
from litex import RemoteClient
wb = RemoteClient()
wb.open()
# # #
from litescope.software.driver.analyzer import LiteScopeAnalyzerDriver
analyzer = LiteScopeAnalyzerDriver(wb.regs, "analyzer", debug=True)
analyzer.configure_trigger(cond={})
analyzer.configure_subsampler(1)
analyzer.run(offset=128, length=256)
analyzer.wait_done()
analyzer.upload()
analyzer.save("dump.vcd")
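# The resulting dump.vcd can be opened in a VCD waveform viewer such as
# GTKWave (illustrative suggestion, not part of the original example).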
# # #
wb.close() | [
"[email protected]"
] | |
ffb723bce5647ba3b185cf4e227e25b2ff78a4d7 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_2/frdyon001/question2.py | 26dadb99dfec05c266eb818b46161070e84fcf6d | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,854 | py | # Student Number: FRDYON001
# Name: Yonela Ford
# 30 Second Rule Expert
# Date: 08 March 2014
def rule():
print("Welcome to the 30 Second Rule Expert")
print("------------------------------------")
print("Answer the following questions by selecting from among the options.")
ans=input("Did anyone see you? (yes/no)\n")
if (ans=="yes"):
ans=input("Was it a boss/lover/parent? (yes/no)\n")
if (ans=="yes"):
ans=input("Was it expensive? (yes/no)\n")
if (ans=="yes"):
ans=input("Can you cut off the part that touched the floor? (yes/no)\n")
if (ans=="yes"):
print("Decision: Eat it.")
elif (ans=="no"):
print("Decision: Your call.")
elif (ans=="no"):
ans=input("Is it chocolate? (yes/no)\n")
if (ans=="yes"):
print("Decision: Eat it.")
elif (ans=="no"):
print("Decision: Don't eat it.")
elif (ans=="no"):
print("Decision: Eat it.")
elif (ans=="no"):
ans=input("Was it sticky? (yes/no)\n")
if (ans=="yes"):
ans=input("Is it a raw steak? (yes/no)\n")
if (ans=="yes"):
ans=input("Are you a puma? (yes/no)\n")
if (ans=="yes"):
print("Decision: Eat it.")
elif (ans=="no"):
print("Decision: Don't eat it.")
elif (ans=="no"):
ans=input("Did the cat lick it? (yes/no)\n")
if (ans=="yes"):
ans=input("Is your cat healthy? (yes/no)\n")
if (ans=="yes"):
print("Decision: Eat it.")
elif (ans=="no"):
print("Decision: Your call.")
elif (ans=="no"):
print( "Decision: Eat it.")
elif (ans=="no"):
ans=input("Is it an Emausaurus? (yes/no)\n")
if (ans=="yes"):
ans=input("Are you a Megalosaurus? (yes/no)\n")
if (ans=="yes"):
print("Decision: Eat it.")
elif (ans=="no"):
print("Decision: Don't eat it.")
elif (ans=="no"):
ans=input("Did the cat lick it? (yes/no)\n")
if (ans=="yes"):
ans=input("Is your cat healthy? (yes/no)\n")
if (ans=="yes"):
print("Decision: Eat it.")
elif (ans=="no"):
print("Decision: Your call.")
elif (ans=="no"):
print("Decision: Eat it.")
rule()
| [
"[email protected]"
] | |
8651ed35b2eb2e819fafde9dad2ea2b5c37309a2 | 9354603c34bd8e0e477f8777054dbd775c994ea4 | /webhr/urls.py | b8fc022d061ed07a32c02af440fd47c815a9bd56 | [] | no_license | Drjimoh/the-hr-project | 8c661ee2b5052b8e919dfcdb75827246bcf8d8ea | fc5d2085c7d4da0326cedc66342b0f9f058136d5 | refs/heads/master | 2020-05-20T16:20:02.217053 | 2019-05-08T18:58:44 | 2019-05-08T18:58:44 | 185,664,268 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | """webhr URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from cvranker import views
from django.contrib.admin.views.decorators import staff_member_required
urlpatterns = [
path('admin/', admin.site.urls),
path('api', staff_member_required(views.ApiView.as_view()), name='api view'),
path('', views.add_cv, name= 'Homepage'),
path('ranks', views.get_top_cvs, name='Ranks'),
path('generate', views.GenerateScoredCvView.as_view(), name='generate'),
]
| [
"[email protected]"
] | |
a53ec68b0e8ce40a7cda19097562ab614461ffc1 | b05761d771bb5a85d39d370c649567c1ff3eb089 | /venv/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py | 6a5fa1d94277dc9ee39c696ded14e23b17d938e6 | [] | no_license | JawshyJ/Coding_Practice | 88c49cab955eab04609ec1003b6b8c20f103fc06 | eb6b229d41aa49b1545af2120e6bee8e982adb41 | refs/heads/master | 2023-02-19T10:18:04.818542 | 2023-02-06T21:22:58 | 2023-02-06T21:22:58 | 247,788,631 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,185 | py | """Support for installing and building the "wheel" binary package format.
"""
import collections
import compileall
import contextlib
import csv
import importlib
import logging
import os.path
import re
import shutil
import sys
import warnings
from base64 import urlsafe_b64decode, urlsafe_b64encode
from email.message import Message
from itertools import chain, filterfalse, starmap
from typing import (
IO,
TYPE_CHECKING,
Any,
BinaryIO,
Callable,
Dict,
Iterable,
Iterator,
List,
NewType,
Optional,
Sequence,
Set,
Tuple,
Union,
cast,
)
from zipfile import ZipFile, ZipInfo
from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor.distlib.util import get_export_entry
from pip._vendor.pkg_resources import Distribution
from pip._vendor.six import ensure_str, ensure_text, reraise
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_major_minor_version
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition
from pip._internal.utils.unpacking import (
current_umask,
is_within_directory,
set_extracted_file_to_default_mode_plus_executable,
zip_item_is_executable,
)
from pip._internal.utils.wheel import parse_wheel, pkg_resources_distribution_for_wheel
if TYPE_CHECKING:
from typing import Protocol
class File(Protocol):
src_record_path = None # type: RecordPath
dest_path = None # type: str
changed = None # type: bool
def save(self):
# type: () -> None
pass
logger = logging.getLogger(__name__)
RecordPath = NewType('RecordPath', str)
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
def rehash(path, blocksize=1 << 20):
# type: (str, int) -> Tuple[str, str]
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
h, length = hash_file(path, blocksize)
digest = 'sha256=' + urlsafe_b64encode(
h.digest()
).decode('latin1').rstrip('=')
return (digest, str(length))
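# Illustrative example (added): for a 1234-byte file, rehash() returns a pair
# like ('sha256=' + a 43-character urlsafe base64 digest, '1234'); the '='
# padding is stripped, matching the RECORD file convention.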
def csv_io_kwargs(mode):
# type: (str) -> Dict[str, Any]
"""Return keyword arguments to properly open a CSV file
in the given mode.
"""
return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
def fix_script(path):
# type: (str) -> bool
"""Replace #!python with #!/path/to/python
Return True if file was changed.
"""
# XXX RECORD hashes will need to be updated
assert os.path.isfile(path)
with open(path, 'rb') as script:
firstline = script.readline()
if not firstline.startswith(b'#!python'):
return False
exename = firstline[2:]
firstline = b'#!/usr/bin/env ' + exename + os.linesep.encode("ascii")
rest = script.read()
# If the file is installed from the pool, let's unlink it before
# writing the new version.
if not os.access(path, os.W_OK):
os.unlink(path)
with open(path, 'wb') as script:
script.write(firstline)
script.write(rest)
return True
def wheel_root_is_purelib(metadata):
# type: (Message) -> bool
return metadata.get("Root-Is-Purelib", "").lower() == "true"
def get_entrypoints(distribution):
# type: (Distribution) -> Tuple[Dict[str, str], Dict[str, str]]
# get the entry points and then the script names
try:
console = distribution.get_entry_map('console_scripts')
gui = distribution.get_entry_map('gui_scripts')
except KeyError:
# Our dict-based Distribution raises KeyError if entry_points.txt
# doesn't exist.
return {}, {}
def _split_ep(s):
# type: (pkg_resources.EntryPoint) -> Tuple[str, str]
"""get the string representation of EntryPoint,
remove space and split on '='
"""
split_parts = str(s).replace(" ", "").split("=")
return split_parts[0], split_parts[1]
# convert the EntryPoint objects into strings with module:function
console = dict(_split_ep(v) for v in console.values())
gui = dict(_split_ep(v) for v in gui.values())
return console, gui
def message_about_scripts_not_on_PATH(scripts):
# type: (Sequence[str]) -> Optional[str]
"""Determine if any scripts are not on PATH and format a warning.
Returns a warning message if one or more scripts are not on PATH,
otherwise None.
"""
if not scripts:
return None
# Group scripts by the path they were installed in
grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]]
for destfile in scripts:
parent_dir = os.path.dirname(destfile)
script_name = os.path.basename(destfile)
grouped_by_dir[parent_dir].add(script_name)
# We don't want to warn for directories that are on PATH.
not_warn_dirs = [
os.path.normcase(i).rstrip(os.sep) for i in
os.environ.get("PATH", "").split(os.pathsep)
]
# If an executable sits with sys.executable, we don't warn for it.
# This covers the case of venv invocations without activating the venv.
not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
warn_for = {
parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
if os.path.normcase(parent_dir) not in not_warn_dirs
} # type: Dict[str, Set[str]]
if not warn_for:
return None
# Format a message
msg_lines = []
for parent_dir, dir_scripts in warn_for.items():
sorted_scripts = sorted(dir_scripts) # type: List[str]
if len(sorted_scripts) == 1:
start_text = "script {} is".format(sorted_scripts[0])
else:
start_text = "scripts {} are".format(
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
)
msg_lines.append(
"The {} installed in '{}' which is not on PATH."
.format(start_text, parent_dir)
)
last_line_fmt = (
"Consider adding {} to PATH or, if you prefer "
"to suppress this warning, use --no-warn-script-location."
)
if len(msg_lines) == 1:
msg_lines.append(last_line_fmt.format("this directory"))
else:
msg_lines.append(last_line_fmt.format("these directories"))
# Add a note if any directory starts with ~
warn_for_tilde = any(
i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
)
if warn_for_tilde:
tilde_warning_msg = (
"NOTE: The current PATH contains path(s) starting with `~`, "
"which may not be expanded by all applications."
)
msg_lines.append(tilde_warning_msg)
# Returns the formatted multiline message
return "\n".join(msg_lines)
def _normalized_outrows(outrows):
# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
"""Normalize the given rows of a RECORD file.
Items in each row are converted into str. Rows are then sorted to make
the value more predictable for tests.
Each row is a 3-tuple (path, hash, size) and corresponds to a record of
a RECORD file (see PEP 376 and PEP 427 for details). For the rows
passed to this function, the size can be an integer as an int or string,
or the empty string.
"""
# Normally, there should only be one row per path, in which case the
# second and third elements don't come into play when sorting.
# However, in cases in the wild where a path might happen to occur twice,
# we don't want the sort operation to trigger an error (but still want
# determinism). Since the third element can be an int or string, we
# coerce each element to a string to avoid a TypeError in this case.
# For additional background, see--
# https://github.com/pypa/pip/issues/5868
return sorted(
(ensure_str(record_path, encoding='utf-8'), hash_, str(size))
for record_path, hash_, size in outrows
)
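# Illustrative example (added): a row ('pkg/__init__.py', 'sha256=abc', 1234)
# normalizes to ('pkg/__init__.py', 'sha256=abc', '1234'), and sorting makes
# the RECORD output deterministic even if a path happens to occur twice.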
def _record_to_fs_path(record_path):
# type: (RecordPath) -> str
return record_path
def _fs_to_record_path(path, relative_to=None):
# type: (str, Optional[str]) -> RecordPath
if relative_to is not None:
# On Windows, do not handle relative paths if they belong to different
# logical disks
if os.path.splitdrive(path)[0].lower() == \
os.path.splitdrive(relative_to)[0].lower():
path = os.path.relpath(path, relative_to)
path = path.replace(os.path.sep, '/')
return cast('RecordPath', path)
def _parse_record_path(record_column):
# type: (str) -> RecordPath
p = ensure_text(record_column, encoding='utf-8')
return cast('RecordPath', p)
def get_csv_rows_for_installed(
old_csv_rows, # type: List[List[str]]
installed, # type: Dict[RecordPath, RecordPath]
changed, # type: Set[RecordPath]
generated, # type: List[str]
lib_dir, # type: str
):
# type: (...) -> List[InstalledCSVRow]
"""
:param installed: A map from archive RECORD path to installation RECORD
path.
"""
installed_rows = [] # type: List[InstalledCSVRow]
for row in old_csv_rows:
if len(row) > 3:
logger.warning('RECORD line has more than three elements: %s', row)
old_record_path = _parse_record_path(row[0])
new_record_path = installed.pop(old_record_path, old_record_path)
if new_record_path in changed:
digest, length = rehash(_record_to_fs_path(new_record_path))
else:
digest = row[1] if len(row) > 1 else ''
length = row[2] if len(row) > 2 else ''
installed_rows.append((new_record_path, digest, length))
for f in generated:
path = _fs_to_record_path(f, lib_dir)
digest, length = rehash(f)
installed_rows.append((path, digest, length))
for installed_record_path in installed.values():
installed_rows.append((installed_record_path, '', ''))
return installed_rows
def get_console_script_specs(console):
# type: (Dict[str, str]) -> List[str]
"""
Given the mapping from entrypoint name to callable, return the relevant
console script specs.
"""
# Don't mutate caller's version
console = console.copy()
scripts_to_generate = []
# Special case pip and setuptools to generate versioned wrappers
#
# The issue is that some projects (specifically, pip and setuptools) use
# code in setup.py to create "versioned" entry points - pip2.7 on Python
# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
# the wheel metadata at build time, and so if the wheel is installed with
# a *different* version of Python the entry points will be wrong. The
# correct fix for this is to enhance the metadata to be able to describe
# such versioned entry points, but that won't happen till Metadata 2.0 is
# available.
# In the meantime, projects using versioned entry points will either have
# incorrect versioned entry points, or they will not be able to distribute
# "universal" wheels (i.e., they will need a wheel per Python version).
#
# Because setuptools and pip are bundled with _ensurepip and virtualenv,
# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
# override the versioned entry points in the wheel and generate the
# correct ones. This code is purely a short-term measure until Metadata 2.0
# is available.
#
# To add the level of hack in this section of code, in order to support
# ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
# variable which will control which version scripts get installed.
#
# ENSUREPIP_OPTIONS=altinstall
# - Only pipX.Y and easy_install-X.Y will be generated and installed
# ENSUREPIP_OPTIONS=install
# - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
# that this option is technically if ENSUREPIP_OPTIONS is set and is
# not altinstall
# DEFAULT
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
# and easy_install-X.Y.
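    # Illustrative example (added; assumes Python 3.9 and no ENSUREPIP_OPTIONS
    # in the environment): the code below expands the single 'pip' entry point
    # into the console scripts 'pip', 'pip3' and 'pip3.9', all pointing at the
    # same callable.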
pip_script = console.pop('pip', None)
if pip_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
scripts_to_generate.append('pip = ' + pip_script)
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
scripts_to_generate.append(
'pip{} = {}'.format(sys.version_info[0], pip_script)
)
scripts_to_generate.append(
f'pip{get_major_minor_version()} = {pip_script}'
)
# Delete any other versioned pip entry points
pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
for k in pip_ep:
del console[k]
easy_install_script = console.pop('easy_install', None)
if easy_install_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
scripts_to_generate.append(
'easy_install = ' + easy_install_script
)
scripts_to_generate.append(
'easy_install-{} = {}'.format(
get_major_minor_version(), easy_install_script
)
)
# Delete any other versioned easy_install entry points
easy_install_ep = [
k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
]
for k in easy_install_ep:
del console[k]
# Generate the console entry points specified in the wheel
scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))
return scripts_to_generate
class ContentAddressablePool:
def __init__(self, cache_dir, save, symlink):
# type: (str, bool, bool) -> None
self.cache_dir = cache_dir
self.save = save
self.symlink = symlink
def path_for_digest(self, digest):
# type: (str) -> str
return os.path.join(
self.cache_dir,
'pool',
digest[:2],
digest[2:4],
digest[4:6],
digest[6:]
)
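# Illustrative note (this pool cache appears to be a local patch rather than
# pip upstream): a sha256 hex digest 'deadbeef...' maps to
# cache_dir/pool/de/ad/be/ef..., fanning entries out over three
# two-character directory levels.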
class ZipBackedFile:
def __init__(
self,
src_record_path, # type: RecordPath
dest_path, # type: str
zip_file, # type: ZipFile
sha256_hash, # type: Optional[str]
pool, # type: Optional[ContentAddressablePool]
):
# type: (...) -> None
self.src_record_path = src_record_path
self.dest_path = dest_path
self._zip_file = zip_file
self.changed = False
self.sha256_hash = sha256_hash
self.pool = pool
def _getinfo(self):
# type: () -> ZipInfo
return self._zip_file.getinfo(self.src_record_path)
def save(self):
# type: () -> None
# When we open the output file below, any existing file is truncated
# before we start writing the new contents. This is fine in most
# cases, but can cause a segfault if pip has loaded a shared
# object (e.g. from pyopenssl through its vendored urllib3)
# Since the shared object is mmap'd an attempt to call a
# symbol in it will then cause a segfault. Unlinking the file
# allows writing of new contents while allowing the process to
# continue to use the old copy.
if os.path.exists(self.dest_path):
os.unlink(self.dest_path)
def _save(dest_path, writable=True):
# type: (str, bool) -> None
# directory creation is lazy and after file filtering
# to ensure we don't install empty dirs; empty dirs can't be
# uninstalled.
parent_dir = os.path.dirname(dest_path)
ensure_dir(parent_dir)
zipinfo = self._getinfo()
with self._zip_file.open(zipinfo) as f:
with open(dest_path, "wb") as dest:
shutil.copyfileobj(f, dest)
if zip_item_is_executable(zipinfo):
set_extracted_file_to_default_mode_plus_executable(
dest_path,
writable=writable
)
if self.sha256_hash is not None and self.pool is not None:
cached_path = self.pool.path_for_digest(self.sha256_hash)
if not os.path.isfile(cached_path):
if not self.pool.save:
# We're not going to use the pool.
_save(self.dest_path, writable=True)
return
# Save to cache and symlink from there.
_save(cached_path, writable=False)
parent_dir = os.path.dirname(self.dest_path)
ensure_dir(parent_dir)
if self.pool.symlink:
os.symlink(cached_path, self.dest_path)
return
# Fall back to a hard link. This might not work in all
# platforms and situations, so fall back to regular
# copying if this fails.
try:
os.link(cached_path, self.dest_path)
return
except OSError:
# This is moderately expected. Fall back to copy.
pass
_save(self.dest_path, writable=True)
class ScriptFile:
def __init__(self, file):
# type: (File) -> None
self._file = file
self.src_record_path = self._file.src_record_path
self.dest_path = self._file.dest_path
self.changed = False
def save(self):
# type: () -> None
self._file.save()
self.changed = fix_script(self.dest_path)
class MissingCallableSuffix(InstallationError):
def __init__(self, entry_point):
# type: (str) -> None
super().__init__(
"Invalid script entry point: {} - A callable "
"suffix is required. Cf https://packaging.python.org/"
"specifications/entry-points/#use-for-scripts for more "
"information.".format(entry_point)
)
def _raise_for_invalid_entrypoint(specification):
# type: (str) -> None
entry = get_export_entry(specification)
if entry is not None and entry.suffix is None:
raise MissingCallableSuffix(str(entry))
class PipScriptMaker(ScriptMaker):
def make(self, specification, options=None):
# type: (str, Dict[str, Any]) -> List[str]
_raise_for_invalid_entrypoint(specification)
return super().make(specification, options)
def _install_wheel(
name, # type: str
wheel_zip, # type: ZipFile
wheel_path, # type: str
scheme, # type: Scheme
pycompile=True, # type: bool
noop=False, # type: bool
warn_script_location=True, # type: bool
direct_url=None, # type: Optional[DirectUrl]
requested=False, # type: bool
pool=None, # type: Optional[ContentAddressablePool]
):
# type: (...) -> None
"""Install a wheel.
:param name: Name of the project to install
:param wheel_zip: open ZipFile for wheel being installed
:param scheme: Distutils scheme dictating the install directories
:param req_description: String used in place of the requirement, for
logging
:param pycompile: Whether to byte-compile installed Python files
:param warn_script_location: Whether to check that scripts are installed
into a directory on PATH
:param pool: An optional content-addressable pool cache
:raises UnsupportedWheel:
* when the directory holds an unpacked wheel with incompatible
Wheel-Version
* when the .dist-info dir does not match the wheel
"""
info_dir, metadata = parse_wheel(wheel_zip, name)
if wheel_root_is_purelib(metadata):
lib_dir = scheme.purelib
else:
lib_dir = scheme.platlib
distribution = pkg_resources_distribution_for_wheel(
wheel_zip, name, wheel_path
)
record_text = distribution.get_metadata('RECORD')
record_rows = list(csv.reader(record_text.splitlines()))
digests = {} # type: Dict[RecordPath, str]
if pool is not None:
for row in record_rows:
if len(row) < 3:
continue
parsed_record_path = _parse_record_path(row[0])
if '=' not in row[1]:
continue
digest_name, b64hash = row[1].split('=', 1)
if digest_name != 'sha256':
continue
digests[parsed_record_path] = urlsafe_b64decode(f'{b64hash}=').hex()
# Record details of the files moved
# installed = files copied from the wheel to the destination
# changed = files changed while installing (scripts #! line typically)
# generated = files newly generated during the install (script wrappers)
installed = {} # type: Dict[RecordPath, RecordPath]
changed = set() # type: Set[RecordPath]
generated = [] # type: List[str]
def record_installed(srcfile, destfile, modified=False):
# type: (RecordPath, str, bool) -> None
"""Map archive RECORD paths to installation RECORD paths."""
newpath = _fs_to_record_path(destfile, lib_dir)
installed[srcfile] = newpath
if modified:
changed.add(_fs_to_record_path(destfile))
def all_paths():
# type: () -> Iterable[RecordPath]
names = wheel_zip.namelist()
# If a flag is set, names may be unicode in Python 2. We convert to
# text explicitly so these are valid for lookup in RECORD.
decoded_names = map(ensure_text, names)
for name in decoded_names:
yield cast("RecordPath", name)
def is_dir_path(path):
# type: (RecordPath) -> bool
return path.endswith("/")
def assert_no_path_traversal(dest_dir_path, target_path):
# type: (str, str) -> None
if not is_within_directory(dest_dir_path, target_path):
message = (
"The wheel {!r} has a file {!r} trying to install"
" outside the target directory {!r}"
)
raise InstallationError(
message.format(wheel_path, target_path, dest_dir_path)
)
def root_scheme_file_maker(zip_file, dest):
# type: (ZipFile, str) -> Callable[[RecordPath], File]
def make_root_scheme_file(record_path):
# type: (RecordPath) -> File
normed_path = os.path.normpath(record_path)
dest_path = os.path.join(dest, normed_path)
assert_no_path_traversal(dest, dest_path)
return ZipBackedFile(
record_path,
dest_path,
zip_file,
digests.get(record_path),
pool
)
return make_root_scheme_file
def data_scheme_file_maker(zip_file, scheme):
# type: (ZipFile, Scheme) -> Callable[[RecordPath], File]
scheme_paths = {}
for key in SCHEME_KEYS:
encoded_key = ensure_text(key)
scheme_paths[encoded_key] = ensure_text(
getattr(scheme, key), encoding=sys.getfilesystemencoding()
)
def make_data_scheme_file(record_path):
# type: (RecordPath) -> File
normed_path = os.path.normpath(record_path)
try:
_, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
except ValueError:
message = (
"Unexpected file in {}: {!r}. .data directory contents"
" should be named like: '<scheme key>/<path>'."
).format(wheel_path, record_path)
raise InstallationError(message)
try:
scheme_path = scheme_paths[scheme_key]
except KeyError:
valid_scheme_keys = ", ".join(sorted(scheme_paths))
message = (
"Unknown scheme key used in {}: {} (for file {!r}). .data"
" directory contents should be in subdirectories named"
" with a valid scheme key ({})"
).format(
wheel_path, scheme_key, record_path, valid_scheme_keys
)
raise InstallationError(message)
dest_path = os.path.join(scheme_path, dest_subpath)
assert_no_path_traversal(scheme_path, dest_path)
return ZipBackedFile(
record_path,
dest_path,
zip_file,
digests.get(record_path),
pool
)
return make_data_scheme_file
def is_data_scheme_path(path):
# type: (RecordPath) -> bool
return path.split("/", 1)[0].endswith(".data")
paths = all_paths()
file_paths = filterfalse(is_dir_path, paths)
root_scheme_paths, data_scheme_paths = partition(
is_data_scheme_path, file_paths
)
make_root_scheme_file = root_scheme_file_maker(
wheel_zip,
ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
)
files = map(make_root_scheme_file, root_scheme_paths)
def is_script_scheme_path(path):
# type: (RecordPath) -> bool
parts = path.split("/", 2)
return (
len(parts) > 2 and
parts[0].endswith(".data") and
parts[1] == "scripts"
)
other_scheme_paths, script_scheme_paths = partition(
is_script_scheme_path, data_scheme_paths
)
make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
files = chain(files, other_scheme_files)
# Get the defined entry points
console, gui = get_entrypoints(distribution)
def is_entrypoint_wrapper(file):
# type: (File) -> bool
# EP, EP.exe and EP-script.py are scripts generated for
# entry point EP by setuptools
path = file.dest_path
name = os.path.basename(path)
if name.lower().endswith('.exe'):
matchname = name[:-4]
elif name.lower().endswith('-script.py'):
matchname = name[:-10]
elif name.lower().endswith(".pya"):
matchname = name[:-4]
else:
matchname = name
# Ignore setuptools-generated scripts
return (matchname in console or matchname in gui)
script_scheme_files = map(make_data_scheme_file, script_scheme_paths)
script_scheme_files = filterfalse(
is_entrypoint_wrapper, script_scheme_files
)
script_scheme_files = map(ScriptFile, script_scheme_files)
files = chain(files, script_scheme_files)
if noop:
# Nothing to do here.
return
for file in files:
file.save()
record_installed(file.src_record_path, file.dest_path, file.changed)
def pyc_source_file_paths():
# type: () -> Iterator[str]
# We de-duplicate installation paths, since there can be overlap (e.g.
# file in .data maps to same location as file in wheel root).
# Sorting installation paths makes it easier to reproduce and debug
# issues related to permissions on existing files.
for installed_path in sorted(set(installed.values())):
full_installed_path = os.path.join(lib_dir, installed_path)
if not os.path.isfile(full_installed_path):
continue
if not full_installed_path.endswith('.py'):
continue
yield full_installed_path
def pyc_output_path(path):
# type: (str) -> str
"""Return the path the pyc file would have been written to.
"""
return importlib.util.cache_from_source(path)
# Compile all of the pyc files for the installed files
if pycompile:
with captured_stdout() as stdout:
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
for path in pyc_source_file_paths():
# Python 2's `compileall.compile_file` requires a str in
# error cases, so we must convert to the native type.
path_arg = ensure_str(
path, encoding=sys.getfilesystemencoding()
)
success = compileall.compile_file(
path_arg, force=True, quiet=True
)
if success:
pyc_path = pyc_output_path(path)
assert os.path.exists(pyc_path)
pyc_record_path = cast(
"RecordPath", pyc_path.replace(os.path.sep, "/")
)
record_installed(pyc_record_path, pyc_path)
logger.debug(stdout.getvalue())
maker = PipScriptMaker(None, scheme.scripts)
# Ensure old scripts are overwritten.
# See https://github.com/pypa/pip/issues/1800
maker.clobber = True
# Ensure we don't generate any variants for scripts because this is almost
# never what somebody wants.
# See https://bitbucket.org/pypa/distlib/issue/35/
maker.variants = {''}
# This is required because otherwise distlib creates scripts that are not
# executable.
# See https://bitbucket.org/pypa/distlib/issue/32/
maker.set_mode = True
# Generate the console and GUI entry points specified in the wheel
scripts_to_generate = get_console_script_specs(console)
gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))
generated_console_scripts = maker.make_multiple(scripts_to_generate)
generated.extend(generated_console_scripts)
generated.extend(
maker.make_multiple(gui_scripts_to_generate, {'gui': True})
)
if warn_script_location:
msg = message_about_scripts_not_on_PATH(generated_console_scripts)
if msg is not None:
logger.warning(msg)
generated_file_mode = 0o666 & ~current_umask()
@contextlib.contextmanager
def _generate_file(path, **kwargs):
# type: (str, **Any) -> Iterator[BinaryIO]
with adjacent_tmp_file(path, **kwargs) as f:
yield f
os.chmod(f.name, generated_file_mode)
replace(f.name, path)
dest_info_dir = os.path.join(lib_dir, info_dir)
# Record pip as the installer
installer_path = os.path.join(dest_info_dir, 'INSTALLER')
with _generate_file(installer_path) as installer_file:
installer_file.write(b'pip\n')
generated.append(installer_path)
# Record the PEP 610 direct URL reference
if direct_url is not None:
direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
with _generate_file(direct_url_path) as direct_url_file:
direct_url_file.write(direct_url.to_json().encode("utf-8"))
generated.append(direct_url_path)
# Record the REQUESTED file
if requested:
requested_path = os.path.join(dest_info_dir, 'REQUESTED')
with open(requested_path, "wb"):
pass
generated.append(requested_path)
rows = get_csv_rows_for_installed(
record_rows,
installed=installed,
changed=changed,
generated=generated,
lib_dir=lib_dir)
# Record details of all files installed
record_path = os.path.join(dest_info_dir, 'RECORD')
with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
# The type mypy infers for record_file is different for Python 3
# (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
# cast to typing.IO[str] as a workaround.
writer = csv.writer(cast('IO[str]', record_file))
writer.writerows(_normalized_outrows(rows))
@contextlib.contextmanager
def req_error_context(req_description):
# type: (str) -> Iterator[None]
try:
yield
except InstallationError as e:
message = "For req: {}. {}".format(req_description, e.args[0])
reraise(
InstallationError, InstallationError(message), sys.exc_info()[2]
)
def install_wheel(
name, # type: str
wheel_path, # type: str
scheme, # type: Scheme
req_description, # type: str
pycompile=True, # type: bool
noop=False, # type: bool
warn_script_location=True, # type: bool
direct_url=None, # type: Optional[DirectUrl]
requested=False, # type: bool
pool=None, # type: Optional[ContentAddressablePool]
):
# type: (...) -> None
with ZipFile(wheel_path, allowZip64=True) as z:
with req_error_context(req_description):
_install_wheel(
name=name,
wheel_zip=z,
wheel_path=wheel_path,
scheme=scheme,
pycompile=pycompile,
noop=noop,
warn_script_location=warn_script_location,
direct_url=direct_url,
requested=requested,
pool=pool,
)
| [
"[email protected]"
] | |
371ee8cb4b4f7e37636a6fbfe01b1f1ba8180744 | f8b5aafac15f408a48fabf853a918015c927e6fe | /bk_tomo/venv/venv27/bin/openstack | ef4239b2369d1cd6ac9e4daa1bf696a84ace7ec5 | [] | no_license | to30/tmp | bda1ac0ca3fc61e96c2a1c491367b698d7e97937 | ec809683970af6787728c2c41f161f416155982a | refs/heads/master | 2021-01-01T04:25:52.040770 | 2016-05-13T16:34:59 | 2016-05-13T16:34:59 | 58,756,087 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 243 | #!/home/tomo/venv/venv27/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from openstackclient.shell import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
c82fba8ab01d1d077471b17f9aead11553d75109 | f24edb38dd4f7de8a7683afbbc9ab2a4237a361e | /venv/lib/python3.6/site-packages/pip/_internal/network/auth.py | 64c6fd42fd13ad6212d709ee2ae711d0ca0f6507 | [] | no_license | ngecu/automate_django_data_filling | 882220f84a6b4af5484d4b136c740a803ccccfd2 | d6b7095904878f06e4aae6beb2156113a6145c21 | refs/heads/main | 2023-02-26T02:36:26.582387 | 2021-01-31T15:50:22 | 2021-01-31T15:50:22 | 317,846,258 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,537 | py | """Network Authentication Helpers
Contains interface (MultiDomainBasicAuth) and associated glue code for
providing credentials in the context of network requests.
"""
import logging
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.utils import get_netrc_auth
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.utils.misc import (
ask,
ask_input,
ask_password,
remove_auth_from_url,
split_auth_netloc_from_url,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Any, Dict, List, Optional, Tuple
from pip._vendor.requests.models import Request, Response
from pip._internal.vcs.versioncontrol import AuthInfo
Credentials = Tuple[str, str, str]
logger = logging.getLogger(__name__)
try:
import keyring # noqa
except ImportError:
keyring = None
except Exception as exc:
logger.warning(
"Keyring is skipped due to an exception: %s", str(exc),
)
keyring = None
def get_keyring_auth(url, username):
# type: (str, str) -> Optional[AuthInfo]
"""Return the tuple auth for a given url from keyring."""
global keyring
if not url or not keyring:
return None
try:
try:
get_credential = keyring.get_credential
except AttributeError:
pass
else:
logger.debug("Getting credentials from keyring for %s", url)
cred = get_credential(url, username)
if cred is not None:
return cred.username, cred.password
return None
if username:
logger.debug("Getting password from keyring for %s", url)
password = keyring.get_password(url, username)
if password:
return username, password
except Exception as exc:
logger.warning(
"Keyring is skipped due to an exception: %s", str(exc),
)
keyring = None
return None
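# Illustrative usage (assumes the optional 'keyring' backend imported above
# is available): get_keyring_auth('https://pypi.example.org/simple/', 'alice')
# may return ('alice', 's3cret') when a credential is stored, and returns
# None when nothing is found or keyring is unavailable.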
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True, index_urls=None):
# type: (bool, Optional[List[str]]) -> None
        self.prompting = prompting
        self.index_urls = index_urls
        self.passwords = {}  # type: Dict[str, AuthInfo]
        # When the user is prompted to enter credentials and keyring is
        # available, we will offer to save them. If the user accepts,
        # this value is set to the credentials they entered. After the
        # request authenticates, the caller should call
        # ``save_credentials`` to save these.
        self._credentials_to_save = None  # type: Optional[Credentials]
def _get_index_url(self, url):
# type: (str) -> Optional[str]
"""Return the original index URL matching the requested URL.
Cached or dynamically generated credentials may work against
the original index URL rather than just the netloc.
The provided url should have had its username and password
removed already. If the original index url had credentials then
they will be included in the return value.
Returns None if no matching index was found, or if --no-index
was specified by the user.
"""
        if not url or not self.index_urls:
return None
        for u in self.index_urls:
prefix = remove_auth_from_url(u).rstrip("/") + "/"
if url.startswith(prefix):
return u
return None
def _get_new_credentials(self, original_url, allow_netrc=True,
allow_keyring=True):
# type: (str, bool, bool) -> AuthInfo
"""Find and return credentials for the specified URL."""
# Split the credentials and netloc from the url.
url, netloc, url_user_password = split_auth_netloc_from_url(
original_url,
)
# Start with the credentials embedded in the url
username, password = url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in url for %s", netloc)
return url_user_password
# Find a matching index url for this request
        index_url = self._get_index_url(url)
if index_url:
# Split the credentials from the url.
index_info = split_auth_netloc_from_url(index_url)
if index_info:
index_url, _, index_url_user_password = index_info
logger.debug("Found index url %s", index_url)
# If an index URL was found, try its embedded credentials
if index_url and index_url_user_password[0] is not None:
username, password = index_url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in index url for %s", netloc)
return index_url_user_password
# Get creds from netrc if we still don't have them
if allow_netrc:
netrc_auth = get_netrc_auth(original_url)
if netrc_auth:
logger.debug("Found credentials in netrc for %s", netloc)
return netrc_auth
# If we don't have a password and keyring is available, use it.
if allow_keyring:
# The index url is more specific than the netloc, so try it first
kr_auth = (
get_keyring_auth(index_url, username) or
get_keyring_auth(netloc, username)
)
if kr_auth:
logger.debug("Found credentials in keyring for %s", netloc)
return kr_auth
return username, password
def _get_url_and_credentials(self, original_url):
# type: (str) -> Tuple[str, Optional[str], Optional[str]]
"""Return the credentials to use for the provided URL.
If allowed, netrc and keyring may be used to obtain the
correct credentials.
Returns (url_without_credentials, username, password). Note
that even if the original URL contains credentials, this
function may return a different username and password.
"""
url, netloc, _ = split_auth_netloc_from_url(original_url)
# Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))
if username is None and password is None:
# No stored credentials. Acquire new credentials without prompting
# the user. (e.g. from netrc, keyring, or the URL itself)
            username, password = self._get_new_credentials(original_url)
if username is not None or password is not None:
# Convert the username and password if they're None, so that
# this netloc will show up as "cached" in the conditional above.
# Further, HTTPBasicAuth doesn't accept None, so it makes sense to
# cache the value that is going to be used.
username = username or ""
password = password or ""
# Store any acquired credentials.
            self.passwords[netloc] = (username, password)
assert (
# Credentials were found
(username is not None and password is not None) or
# Credentials were not found
(username is None and password is None)
), "Could not load credentials from url: {}".format(original_url)
return url, username, password
def __call__(self, req):
# type: (Request) -> Request
# Get credentials for this request
        url, username, password = self._get_url_and_credentials(req.url)
# Set the url of the request to the url without any credentials
req.url = url
if username is not None and password is not None:
# Send the basic auth with this request
req = HTTPBasicAuth(username, password)(req)
# Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)
return req
# Factored out to allow for easy patching in tests
def _prompt_for_password(self, netloc):
# type: (str) -> Tuple[Optional[str], Optional[str], bool]
username = ask_input("User for {}: ".format(netloc))
if not username:
return None, None, False
auth = get_keyring_auth(netloc, username)
if auth and auth[0] is not None and auth[1] is not None:
return auth[0], auth[1], False
password = ask_password("Password: ")
return username, password, True
# Factored out to allow for easy patching in tests
def _should_save_password_to_keyring(self):
# type: () -> bool
if not keyring:
return False
return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
def handle_401(self, resp, **kwargs):
# type: (Response, **Any) -> Response
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
# We are not able to prompt the user so simply return the response
        if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
        username, password, save = self._prompt_for_password(parsed.netloc)
        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)
            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = (parsed.netloc, username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)
# On successful request, save the credentials that were used to
# keyring. (Note that if the user responded "no" above, this member
# is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def warn_on_401(self, resp, **kwargs):
# type: (Response, **Any) -> None
"""Response callback to warn about incorrect credentials."""
if resp.status_code == 401:
logger.warning(
'401 Error, Credentials not correct for %s', resp.request.url,
)
def save_credentials(self, resp, **kwargs):
# type: (Response, **Any) -> None
"""Response callback to save credentials on success."""
assert keyring is not None, "should never reach here without keyring"
if not keyring:
return
        creds = self._credentials_to_save
        self._credentials_to_save = None
if creds and resp.status_code < 400:
try:
logger.info('Saving credentials to keyring')
keyring.set_password(*creds)
except Exception:
logger.exception('Failed to save credentials')
| [
"[email protected]"
] | |
f59f062c20bb92420bb4ec172e9e3f763356ef80 | a140fe192fd643ce556fa34bf2f84ddbdb97f091 | /.history/quiz04_20200628163202.py | 43cf11c468754194ccbd6ea39f998db2cd2226d8 | [] | no_license | sangha0719/py-practice | 826f13cb422ef43992a69f822b9f04c2cb6d4815 | 6d71ce64bf91cc3bccee81378577d84ba9d9c121 | refs/heads/master | 2023-03-13T04:40:55.883279 | 2021-02-25T12:02:04 | 2021-02-25T12:02:04 | 342,230,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 826 | py | # Your school is hosting a Python coding contest.
# To boost attendance, you have decided to run a comment giveaway.
# Among the commenters, one winner drawn at random gets chicken and three get coffee coupons.
# Write the drawing program.
# Condition 1: for simplicity, assume 20 people commented, with IDs 1-20
# Condition 2: draw at random regardless of comment content; no duplicate winners
# Condition 3: use shuffle and sample from the random module
# (example output)
# -- Winner announcement --
# Chicken winner: 1
# Coffee winners: [2, 3, 4]
# -- Congratulations. --
# (usage example)
from random import *
# lst = [1, 2, 3, 4, 5]
# print(lst)
# shuffle(lst)
# print(lst)
# print(sample(lst, 1))
users = list(range(1, 21))  # commenter IDs 1-20
shuffle(users)
winners = sample(users, 4)  # 4 distinct winners: 1 chicken + 3 coffee
print(" -- Winner announcement -- ")
print("Chicken winner: {0}".format(winners[0]))
print("Coffee winners: {0}".format(winners[1:]))
print(" -- Congratulations. -- ") | [
"[email protected]"
] | |
4453fb58e33a80b6a1510a8e4e5c633e06b4cdc2 | e36985669a2b068dfb3e43b7f5870dc114bb158b | /python_code/dataExtraction.py | 7722d25b7d06ff6e71446c9ef08cf4b970e527d8 | [] | no_license | assassint2017/Data-extraction-UI | b3f0f43dc48e12c0da158bdb4a7c2c9dd5d92ab5 | d7e1b97100ad97b334f03b0fbf09c2a506339b1c | refs/heads/master | 2020-04-11T06:18:50.417214 | 2018-12-21T12:38:47 | 2018-12-21T12:38:47 | 161,577,841 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,732 | py | """
Data extraction code
"""
import datetime
import pandas as pd
from numpy import nan
#------------------------- settings section -------------------------------
# path of the source csv file (avoid non-ASCII characters in the path)
# csvDir = 'C:\\Users\\14595\\Desktop\\2018HB example .csv.gz'
# path of the extracted csv file (avoid non-ASCII characters in the path)
# extDir = 'C:\\Users\\14595\\Desktop\\ext.csv'
# path of the per-sentinel-site case-count summary file (avoid non-ASCII characters in the path)
# summaryDir = 'C:\\Users\\14595\\Desktop\\summary.csv'
# time range
# start = pd.Timestamp(datetime.date(year=2018, month=1, day=1))
# end = pd.Timestamp(datetime.date(year=2018, month=5, day=30))
# selected region codes
# locs = [42010200, 42050300, 42050600]
#---------------------------------------------------------------------
def dataExtraction(csvDir, extDir, summaryDir, start, end, locs):
    # read the csv file
    csv = pd.read_csv(csvDir, compression='gzip', encoding='gbk')
    # parse the diagnosis-date column ('诊断时间') into datetimes
    csv['诊断时间'] = pd.to_datetime(csv['诊断时间'], format='%Y/%m/%d')
    # filter according to the selected conditions
    if start is None and end is None:  # only region codes were selected
        csv = csv[csv['报告单位地区编码'].isin(locs)]
    elif locs is None:  # only a diagnosis-time range was selected
        csv = csv[(csv['诊断时间'] >= start) & (csv['诊断时间'] <= end)]
    else:  # both conditions were selected
        csv = csv[(csv['诊断时间'] >= start) & (csv['诊断时间'] <= end) & (csv['报告单位地区编码'].isin(locs))]
    # save the extracted data to a csv file
    csv.to_csv(extDir, index=0, encoding='gbk')
    def removeSpace(item):
        """
        Strip spaces that were accidentally typed during data entry.
        """
        return item.strip()
    # assign the cleaned column back so the stripping actually takes effect
    # (the original discarded the result of .apply)
    csv['录卡用户所属单位'] = csv['录卡用户所属单位'].apply(removeSpace)
temp = pd.value_counts(csv['录卡用户所属单位'])
codes = []
for hospital in list(temp.index):
index = csv[csv['录卡用户所属单位'] == hospital].index.tolist()[0]
codes.append(csv['报告单位地区编码'][index])
summary = pd.DataFrame()
summary['报告单位地区编码'] = codes
summary['报告单位'] = list(temp.index)
summary['病例数'] = temp.values
summary.sort_values(by=['报告单位地区编码'], inplace=True)
summary.reset_index(drop=True, inplace=True)
    # blank repeated region codes so each code appears only once in the summary
    nanlist = []
for i in range(1, len(summary['报告单位地区编码'])):
if summary.loc[i, '报告单位地区编码'] == summary.loc[i - 1, '报告单位地区编码']:
nanlist.append(i)
for i in nanlist:
summary.loc[i, '报告单位地区编码'] = nan
summary.to_csv(summaryDir, index=False, encoding='gbk')
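# Minimal usage sketch (added; hypothetical paths and values, mirroring the
# commented settings block at the top of this file):
if __name__ == '__main__':
    start = pd.Timestamp(datetime.date(year=2018, month=1, day=1))
    end = pd.Timestamp(datetime.date(year=2018, month=5, day=30))
    dataExtraction('2018HB.csv.gz', 'ext.csv', 'summary.csv',
                   start, end, locs=[42010200, 42050300, 42050600])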
| [
"[email protected]"
] | |
99bb440e3d91a657af83b6b5699a5675b2c46f7c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03136/s297842517.py | a2a6230496234027046d6691748a5f445af9dd64 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | n = int(input())
a = list(map(int, input().split()))
b = sorted(a)  # b[n - 1] is the longest side
# a polygon can be formed iff the longest side is strictly shorter
# than the sum of all the other sides
c = sum(b[:n - 1])
if c > b[n - 1]:
print("Yes")
else:
print("No") | [
"[email protected]"
] | |
d5c4cac739d6c9ad1a641938dda9973c912c84c5 | e944d288093c9234c3a6a2a76ffe4e3c9b236cf1 | /annotation_utils/coco/structs/__init__.py | 66e257d06be64002c0bce0580e1d58fd6c768ce7 | [
"MIT"
] | permissive | darwinharianto/annotation_utils | 598b043345790580e99f34f159b9612b9b1bcd52 | 1cbdadaa28ff945e705dd7b806dda395e32ab23c | refs/heads/master | 2022-04-27T01:20:10.738778 | 2020-04-27T09:23:57 | 2020-04-27T09:23:57 | 255,525,300 | 0 | 0 | MIT | 2020-04-27T09:23:59 | 2020-04-14T06:10:57 | Python | UTF-8 | Python | false | false | 245 | py | from .objects import COCO_Info, COCO_License, COCO_Image, \
COCO_Annotation, COCO_Category
from .handlers import COCO_License_Handler, COCO_Image_Handler, \
COCO_Annotation_Handler, COCO_Category_Handler
from .dataset import COCO_Dataset | [
"[email protected]"
] | |
a1f02577c0adfa04d1396283c0f946dca6808285 | 77ee1f677ab2ececb821a11be128b76bcf0e8d6f | /electrum_mona/gui/qt/lightning_dialog.py | 1d709aed9935b2c01bce4e473c6c8bdd4f25e9d9 | [
"MIT"
] | permissive | zcore-dev/electrum-mona | c74e6142a0f34721be70dba68d524ae9ce03179c | 2beb0c9c7794e8b03d1725bae41ee8b792c57275 | refs/heads/master | 2020-08-22T15:32:55.604727 | 2019-10-21T22:56:29 | 2019-10-21T22:56:29 | 216,427,159 | 0 | 0 | MIT | 2019-10-20T21:03:48 | 2019-10-20T21:03:48 | null | UTF-8 | Python | false | false | 3,658 | py | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PyQt5.QtGui import QStandardItemModel, QStandardItem
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QDialog, QWidget, QLabel, QVBoxLayout, QCheckBox,
QGridLayout, QPushButton, QLineEdit, QTabWidget)
from electrum_mona.i18n import _
from .util import HelpLabel, MyTreeView, Buttons
class LightningDialog(QDialog):
def __init__(self, gui_object):
QDialog.__init__(self)
self.gui_object = gui_object
self.config = gui_object.config
self.network = gui_object.daemon.network
self.setWindowTitle(_('Lightning Network'))
self.setMinimumSize(600, 20)
vbox = QVBoxLayout(self)
self.num_peers = QLabel('')
vbox.addWidget(self.num_peers)
self.num_nodes = QLabel('')
vbox.addWidget(self.num_nodes)
self.num_channels = QLabel('')
vbox.addWidget(self.num_channels)
self.status = QLabel('')
vbox.addWidget(self.status)
vbox.addStretch(1)
b = QPushButton(_('Close'))
b.clicked.connect(self.close)
vbox.addLayout(Buttons(b))
self.network.register_callback(self.on_channel_db, ['channel_db'])
self.network.register_callback(self.set_num_peers, ['gossip_peers'])
self.network.register_callback(self.set_unknown_channels, ['unknown_channels'])
self.network.channel_db.update_counts() # trigger callback
self.set_num_peers('', self.network.lngossip.num_peers())
self.set_unknown_channels('', len(self.network.lngossip.unknown_ids))
def on_channel_db(self, event, num_nodes, num_channels, num_policies):
self.num_nodes.setText(_(f'{num_nodes} nodes'))
self.num_channels.setText(_(f'{num_channels} channels'))
def set_num_peers(self, event, num_peers):
self.num_peers.setText(_(f'Connected to {num_peers} peers'))
def set_unknown_channels(self, event, unknown):
self.status.setText(_(f'Requesting {unknown} channels...') if unknown else '')
def is_hidden(self):
return self.isMinimized() or self.isHidden()
def show_or_hide(self):
if self.is_hidden():
self.bring_to_top()
else:
self.hide()
def bring_to_top(self):
self.show()
self.raise_()
def closeEvent(self, event):
self.gui_object.lightning_dialog = None
event.accept()
| [
"[email protected]"
] | |
79c9ac4f57d7c75785c6e238248c49297bcd93e6 | a40d5c5cd0fcc2410e3200f40f6a79f7201b0193 | /kubernetes/client/models/v1beta1_custom_resource_column_definition.py | f844f995b1a28c3f912663019e6dab34922ac0e7 | [
"Apache-2.0"
] | permissive | Unacademy/kubernetes-client | 662cdc2b9fe6df43301e32427e48b1b2715773ca | b7f9c740a82b4585478d052c8032495cdeb3b331 | refs/heads/master | 2023-06-24T05:57:27.226613 | 2022-12-14T16:29:58 | 2022-12-14T16:29:58 | 181,669,794 | 0 | 0 | Apache-2.0 | 2023-06-20T13:05:37 | 2019-04-16T10:43:37 | Python | UTF-8 | Python | false | false | 8,689 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1CustomResourceColumnDefinition(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'json_path': 'str',
'description': 'str',
'format': 'str',
'name': 'str',
'priority': 'int',
'type': 'str'
}
attribute_map = {
'json_path': 'JSONPath',
'description': 'description',
'format': 'format',
'name': 'name',
'priority': 'priority',
'type': 'type'
}
def __init__(self, json_path=None, description=None, format=None, name=None, priority=None, type=None):
"""
V1beta1CustomResourceColumnDefinition - a model defined in Swagger
"""
self._json_path = None
self._description = None
self._format = None
self._name = None
self._priority = None
self._type = None
self.discriminator = None
self.json_path = json_path
if description is not None:
self.description = description
if format is not None:
self.format = format
self.name = name
if priority is not None:
self.priority = priority
self.type = type
@property
def json_path(self):
"""
Gets the json_path of this V1beta1CustomResourceColumnDefinition.
JSONPath is a simple JSON path, i.e. with array notation.
:return: The json_path of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._json_path
@json_path.setter
def json_path(self, json_path):
"""
Sets the json_path of this V1beta1CustomResourceColumnDefinition.
JSONPath is a simple JSON path, i.e. with array notation.
:param json_path: The json_path of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if json_path is None:
raise ValueError("Invalid value for `json_path`, must not be `None`")
self._json_path = json_path
@property
def description(self):
"""
Gets the description of this V1beta1CustomResourceColumnDefinition.
description is a human readable description of this column.
:return: The description of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this V1beta1CustomResourceColumnDefinition.
description is a human readable description of this column.
:param description: The description of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
self._description = description
@property
def format(self):
"""
Gets the format of this V1beta1CustomResourceColumnDefinition.
format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:return: The format of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._format
@format.setter
def format(self, format):
"""
Sets the format of this V1beta1CustomResourceColumnDefinition.
format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:param format: The format of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
self._format = format
@property
def name(self):
"""
Gets the name of this V1beta1CustomResourceColumnDefinition.
name is a human readable name for the column.
:return: The name of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this V1beta1CustomResourceColumnDefinition.
name is a human readable name for the column.
:param name: The name of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._name = name
@property
def priority(self):
"""
Gets the priority of this V1beta1CustomResourceColumnDefinition.
priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a higher priority.
:return: The priority of this V1beta1CustomResourceColumnDefinition.
:rtype: int
"""
return self._priority
@priority.setter
def priority(self, priority):
"""
Sets the priority of this V1beta1CustomResourceColumnDefinition.
priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a higher priority.
:param priority: The priority of this V1beta1CustomResourceColumnDefinition.
:type: int
"""
self._priority = priority
@property
def type(self):
"""
Gets the type of this V1beta1CustomResourceColumnDefinition.
type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:return: The type of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this V1beta1CustomResourceColumnDefinition.
type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:param type: The type of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`")
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1CustomResourceColumnDefinition):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
] | |
fc9b0c269aecdb44c4736fe6b9da03555f7de8e3 | 31622dd16963b459ac6eec71fcf54e4d243ac773 | /edu_sharing_client/models/license.py | 073b8ab7d8a99b38a95e9902e2a8e4a23e2cd02e | [] | no_license | torsten-simon/oeh-search-etl | 95e6e92698a97c98ef9d5b02076edcf993736d6f | eacdadcd8af169cb54629db0d2d46a5616f854a6 | refs/heads/master | 2023-04-16T05:08:41.194239 | 2020-11-16T09:51:59 | 2020-11-16T09:51:59 | 318,169,232 | 0 | 0 | null | 2023-04-03T23:04:46 | 2020-12-03T11:20:44 | null | UTF-8 | Python | false | false | 3,484 | py | # coding: utf-8
"""
edu-sharing Repository REST API
The public restful API of the edu-sharing repository. # noqa: E501
OpenAPI spec version: 1.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class License(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'icon': 'str',
'url': 'str'
}
attribute_map = {
'icon': 'icon',
'url': 'url'
}
def __init__(self, icon=None, url=None): # noqa: E501
"""License - a model defined in Swagger""" # noqa: E501
self._icon = None
self._url = None
self.discriminator = None
if icon is not None:
self.icon = icon
if url is not None:
self.url = url
@property
def icon(self):
"""Gets the icon of this License. # noqa: E501
:return: The icon of this License. # noqa: E501
:rtype: str
"""
return self._icon
@icon.setter
def icon(self, icon):
"""Sets the icon of this License.
:param icon: The icon of this License. # noqa: E501
:type: str
"""
self._icon = icon
@property
def url(self):
"""Gets the url of this License. # noqa: E501
:return: The url of this License. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this License.
:param url: The url of this License. # noqa: E501
:type: str
"""
self._url = url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(License, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, License):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
91b20ea47434b099c8ae47c90abfab4af64dad34 | 89cd8b77ad5171c336cc60b2133fe6468a6cb53f | /Module01_CZ/day1_basics/04-代码/day1/20_重要演示__________变量的定义.py | f4f737892f50ed4f9b166828e1067e18cd29954f | [
"MIT"
] | permissive | fenglihanxiao/Python | 75178f6b6b0c53345e1ed54226ea645216572d6c | 872baf3a3a5ee42740161152605ca2b1ddf4cd30 | refs/heads/master | 2021-05-23T18:49:20.656433 | 2020-04-29T01:06:21 | 2020-04-29T01:06:21 | 253,199,073 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | """
Use variables to store the following personal information:
Name: 张传智
Gender: male
Age: 21
Phone: 18800008888
"""
| [
"[email protected]"
] | |
0289b4bcf761b49c33907f4f98a3ded9f257d4fa | 429a8441bb9730dcf0e33fedcb5f3672a731b3e7 | /xero_python/accounting/models/tax_rate.py | 3105e7e85477aa221bd8d79e66b609249374e58e | [
"MIT"
] | permissive | gregsteelxinja/xero-python | 1a26ec3b05ea156dd6848f2ec313c72e9f39b0e2 | d0473ba91099de3464b3dffa377df5a11ad95afc | refs/heads/master | 2022-12-16T10:54:11.424971 | 2020-09-01T01:00:23 | 2020-09-01T01:00:23 | 291,526,551 | 0 | 0 | null | 2020-08-30T18:16:48 | 2020-08-30T18:16:48 | null | UTF-8 | Python | false | false | 14,802 | py | # coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.2.14
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class TaxRate(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"name": "str",
"tax_type": "str",
"tax_components": "list[TaxComponent]",
"status": "str",
"report_tax_type": "str",
"can_apply_to_assets": "bool",
"can_apply_to_equity": "bool",
"can_apply_to_expenses": "bool",
"can_apply_to_liabilities": "bool",
"can_apply_to_revenue": "bool",
"display_tax_rate": "float",
"effective_rate": "float",
}
attribute_map = {
"name": "Name",
"tax_type": "TaxType",
"tax_components": "TaxComponents",
"status": "Status",
"report_tax_type": "ReportTaxType",
"can_apply_to_assets": "CanApplyToAssets",
"can_apply_to_equity": "CanApplyToEquity",
"can_apply_to_expenses": "CanApplyToExpenses",
"can_apply_to_liabilities": "CanApplyToLiabilities",
"can_apply_to_revenue": "CanApplyToRevenue",
"display_tax_rate": "DisplayTaxRate",
"effective_rate": "EffectiveRate",
}
def __init__(
self,
name=None,
tax_type=None,
tax_components=None,
status=None,
report_tax_type=None,
can_apply_to_assets=None,
can_apply_to_equity=None,
can_apply_to_expenses=None,
can_apply_to_liabilities=None,
can_apply_to_revenue=None,
display_tax_rate=None,
effective_rate=None,
): # noqa: E501
"""TaxRate - a model defined in OpenAPI""" # noqa: E501
self._name = None
self._tax_type = None
self._tax_components = None
self._status = None
self._report_tax_type = None
self._can_apply_to_assets = None
self._can_apply_to_equity = None
self._can_apply_to_expenses = None
self._can_apply_to_liabilities = None
self._can_apply_to_revenue = None
self._display_tax_rate = None
self._effective_rate = None
self.discriminator = None
if name is not None:
self.name = name
if tax_type is not None:
self.tax_type = tax_type
if tax_components is not None:
self.tax_components = tax_components
if status is not None:
self.status = status
if report_tax_type is not None:
self.report_tax_type = report_tax_type
if can_apply_to_assets is not None:
self.can_apply_to_assets = can_apply_to_assets
if can_apply_to_equity is not None:
self.can_apply_to_equity = can_apply_to_equity
if can_apply_to_expenses is not None:
self.can_apply_to_expenses = can_apply_to_expenses
if can_apply_to_liabilities is not None:
self.can_apply_to_liabilities = can_apply_to_liabilities
if can_apply_to_revenue is not None:
self.can_apply_to_revenue = can_apply_to_revenue
if display_tax_rate is not None:
self.display_tax_rate = display_tax_rate
if effective_rate is not None:
self.effective_rate = effective_rate
@property
def name(self):
"""Gets the name of this TaxRate. # noqa: E501
Name of tax rate # noqa: E501
:return: The name of this TaxRate. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this TaxRate.
Name of tax rate # noqa: E501
:param name: The name of this TaxRate. # noqa: E501
:type: str
"""
self._name = name
@property
def tax_type(self):
"""Gets the tax_type of this TaxRate. # noqa: E501
The tax type # noqa: E501
:return: The tax_type of this TaxRate. # noqa: E501
:rtype: str
"""
return self._tax_type
@tax_type.setter
def tax_type(self, tax_type):
"""Sets the tax_type of this TaxRate.
The tax type # noqa: E501
:param tax_type: The tax_type of this TaxRate. # noqa: E501
:type: str
"""
self._tax_type = tax_type
@property
def tax_components(self):
"""Gets the tax_components of this TaxRate. # noqa: E501
See TaxComponents # noqa: E501
:return: The tax_components of this TaxRate. # noqa: E501
:rtype: list[TaxComponent]
"""
return self._tax_components
@tax_components.setter
def tax_components(self, tax_components):
"""Sets the tax_components of this TaxRate.
See TaxComponents # noqa: E501
:param tax_components: The tax_components of this TaxRate. # noqa: E501
:type: list[TaxComponent]
"""
self._tax_components = tax_components
@property
def status(self):
"""Gets the status of this TaxRate. # noqa: E501
See Status Codes # noqa: E501
:return: The status of this TaxRate. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this TaxRate.
See Status Codes # noqa: E501
:param status: The status of this TaxRate. # noqa: E501
:type: str
"""
allowed_values = [
"ACTIVE",
"DELETED",
"ARCHIVED",
"PENDING",
"None",
] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}".format( # noqa: E501
status, allowed_values
)
)
self._status = status
@property
def report_tax_type(self):
"""Gets the report_tax_type of this TaxRate. # noqa: E501
See ReportTaxTypes # noqa: E501
:return: The report_tax_type of this TaxRate. # noqa: E501
:rtype: str
"""
return self._report_tax_type
@report_tax_type.setter
def report_tax_type(self, report_tax_type):
"""Sets the report_tax_type of this TaxRate.
See ReportTaxTypes # noqa: E501
:param report_tax_type: The report_tax_type of this TaxRate. # noqa: E501
:type: str
"""
allowed_values = [
"AVALARA",
"BASEXCLUDED",
"CAPITALSALESOUTPUT",
"CAPITALEXPENSESINPUT",
"ECOUTPUT",
"ECOUTPUTSERVICES",
"ECINPUT",
"ECACQUISITIONS",
"EXEMPTEXPENSES",
"EXEMPTINPUT",
"EXEMPTOUTPUT",
"GSTONIMPORTS",
"INPUT",
"INPUTTAXED",
"MOSSSALES",
"NONE",
"NONEOUTPUT",
"OUTPUT",
"PURCHASESINPUT",
"SALESOUTPUT",
"EXEMPTCAPITAL",
"EXEMPTEXPORT",
"CAPITALEXINPUT",
"GSTONCAPIMPORTS",
"GSTONCAPITALIMPORTS",
"REVERSECHARGES",
"PAYMENTS",
"INVOICE",
"CASH",
"ACCRUAL",
"FLATRATECASH",
"FLATRATEACCRUAL",
"ACCRUALS",
"TXCA",
"SRCAS",
"DSOUTPUT",
"BLINPUT2",
"EPINPUT",
"IMINPUT2",
"MEINPUT",
"IGDSINPUT2",
"ESN33OUTPUT",
"OPINPUT",
"OSOUTPUT",
"TXN33INPUT",
"TXESSINPUT",
"TXREINPUT",
"TXPETINPUT",
"NRINPUT",
"ES33OUTPUT",
"ZERORATEDINPUT",
"ZERORATEDOUTPUT",
"DRCHARGESUPPLY",
"DRCHARGE",
"CAPINPUT",
"CAPIMPORTS",
"IMINPUT",
"INPUT2",
"CIUINPUT",
"SRINPUT",
"OUTPUT2",
"SROUTPUT",
"CAPOUTPUT",
"SROUTPUT2",
"CIUOUTPUT",
"ZROUTPUT",
"ZREXPORT",
"ACC28PLUS",
"ACCUPTO28",
"OTHEROUTPUT",
"SHOUTPUT",
"ZRINPUT",
"BADDEBT",
"OTHERINPUT",
"None",
] # noqa: E501
if report_tax_type not in allowed_values:
raise ValueError(
"Invalid value for `report_tax_type` ({0}), must be one of {1}".format( # noqa: E501
report_tax_type, allowed_values
)
)
self._report_tax_type = report_tax_type
@property
def can_apply_to_assets(self):
"""Gets the can_apply_to_assets of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for asset accounts i.e. true,false # noqa: E501
:return: The can_apply_to_assets of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_assets
@can_apply_to_assets.setter
def can_apply_to_assets(self, can_apply_to_assets):
"""Sets the can_apply_to_assets of this TaxRate.
Boolean to describe if tax rate can be used for asset accounts i.e. true,false # noqa: E501
:param can_apply_to_assets: The can_apply_to_assets of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_assets = can_apply_to_assets
@property
def can_apply_to_equity(self):
"""Gets the can_apply_to_equity of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for equity accounts i.e true,false # noqa: E501
:return: The can_apply_to_equity of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_equity
@can_apply_to_equity.setter
def can_apply_to_equity(self, can_apply_to_equity):
"""Sets the can_apply_to_equity of this TaxRate.
Boolean to describe if tax rate can be used for equity accounts i.e true,false # noqa: E501
:param can_apply_to_equity: The can_apply_to_equity of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_equity = can_apply_to_equity
@property
def can_apply_to_expenses(self):
"""Gets the can_apply_to_expenses of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for expense accounts i.e. true,false # noqa: E501
:return: The can_apply_to_expenses of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_expenses
@can_apply_to_expenses.setter
def can_apply_to_expenses(self, can_apply_to_expenses):
"""Sets the can_apply_to_expenses of this TaxRate.
Boolean to describe if tax rate can be used for expense accounts i.e. true,false # noqa: E501
:param can_apply_to_expenses: The can_apply_to_expenses of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_expenses = can_apply_to_expenses
@property
def can_apply_to_liabilities(self):
"""Gets the can_apply_to_liabilities of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for liability accounts i.e. true,false # noqa: E501
:return: The can_apply_to_liabilities of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_liabilities
@can_apply_to_liabilities.setter
def can_apply_to_liabilities(self, can_apply_to_liabilities):
"""Sets the can_apply_to_liabilities of this TaxRate.
Boolean to describe if tax rate can be used for liability accounts i.e. true,false # noqa: E501
:param can_apply_to_liabilities: The can_apply_to_liabilities of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_liabilities = can_apply_to_liabilities
@property
def can_apply_to_revenue(self):
"""Gets the can_apply_to_revenue of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for revenue accounts i.e. true,false # noqa: E501
:return: The can_apply_to_revenue of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_revenue
@can_apply_to_revenue.setter
def can_apply_to_revenue(self, can_apply_to_revenue):
"""Sets the can_apply_to_revenue of this TaxRate.
Boolean to describe if tax rate can be used for revenue accounts i.e. true,false # noqa: E501
:param can_apply_to_revenue: The can_apply_to_revenue of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_revenue = can_apply_to_revenue
@property
def display_tax_rate(self):
"""Gets the display_tax_rate of this TaxRate. # noqa: E501
Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:return: The display_tax_rate of this TaxRate. # noqa: E501
:rtype: float
"""
return self._display_tax_rate
@display_tax_rate.setter
def display_tax_rate(self, display_tax_rate):
"""Sets the display_tax_rate of this TaxRate.
Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:param display_tax_rate: The display_tax_rate of this TaxRate. # noqa: E501
:type: float
"""
self._display_tax_rate = display_tax_rate
@property
def effective_rate(self):
"""Gets the effective_rate of this TaxRate. # noqa: E501
Effective Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:return: The effective_rate of this TaxRate. # noqa: E501
:rtype: float
"""
return self._effective_rate
@effective_rate.setter
def effective_rate(self, effective_rate):
"""Sets the effective_rate of this TaxRate.
Effective Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:param effective_rate: The effective_rate of this TaxRate. # noqa: E501
:type: float
"""
self._effective_rate = effective_rate
| [
"[email protected]"
] | |
d2edaeec8fdcd119849df0305b0cb817b3235ebe | 8d9318a33afc2c3b5ca8ac99fce0d8544478c94a | /Books/Casandra DB/opscenter-5.1.0/lib/py/orbited/proxy.py | 4c0e80c1f97cce4bb513bffb9be5583f06edd599 | [] | no_license | tushar239/git-large-repo | e30aa7b1894454bf00546312a3fb595f6dad0ed6 | 9ee51112596e5fc3a7ab2ea97a86ec6adc677162 | refs/heads/master | 2021-01-12T13:48:43.280111 | 2016-11-01T22:14:51 | 2016-11-01T22:14:51 | 69,609,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:ba1b92cda51cc0fbe137994a7b857befa45aa64d45009e4fb34ed1df78d0f3fc
size 5501
| [
"[email protected]"
] | |
8f55ee77bb2e6f0f501c6aae41fe353d5946e7ed | 48f092fd8191b0218df8605dc7125e526764e59e | /NestedLoops/venv/Scripts/pip-script.py | 1f860a6a2d99a98a14ef6f35a31d2812b31131f3 | [] | no_license | LalityaSawant/Python-Projects | 2edb430c094fe3d6b4e706cc61f885aa07e24dff | b142708256e26867f09b3063f5f3fffa305ec496 | refs/heads/master | 2020-05-01T03:00:26.012301 | 2019-03-23T22:09:33 | 2019-03-23T22:09:33 | 177,235,109 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | #!C:\Users\lsawant\Documents\Learning\Python\PycharmProjects\NestedLoops\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
)
| [
"[email protected]"
] | |
10cefb112ffc8a72f2ddcd285ff5b6f871ecf497 | 41523dd4871e8ed1043d2b3ddf73417fcbdde209 | /day16/map函数.py | 7a19700236dcf557aafb01afb59951babcaa5d8d | [] | no_license | WayneChen1994/Python1805 | 2aa1c611f8902b8373b8c9a4e06354c25f8826d6 | a168cd3b7749afc326ec4326db413378fd3677d5 | refs/heads/master | 2020-03-30T23:19:00.773288 | 2018-11-02T10:47:40 | 2018-11-02T10:47:40 | 151,697,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 943 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# author:Wayne
'''
What the map function does:
it applies the passed-in function to each object in the sequence in turn,
then returns the results of those applications as an iterator
'''
'''
Requirement: convert ["1", "2", "3", "4", "5"] in a list
to [1, 2, 3, 4, 5]; write it as a function.
'''
def func(alist):
return [int(x) for x in alist]
list1 = ["1", "2", "3", "4", "5"]
print(list1)
print(func(list1))
res = map(int, list1)
print(list(res))
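# Note that map returns a lazy iterator: it can be consumed only once, so a
# second list() call on the same iterator yields an empty list.
res2 = map(int, list1)
print(list(res2))  # [1, 2, 3, 4, 5]
print(list(res2))  # [] - already exhausted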
'''
map(func,lsd)
Argument 1: the function to apply [this function takes one and only one argument]
Argument 2: the sequence to apply it to
'''
'''
Using the map function, produce the sequence [1, 4, 9, ..., n^2]; implement this requirement in one line of code, with n read from the console.
'''
def func2(n):
return list(map(lambda x:x**2, range(1, n+1)))
num = int(input("Please enter the value of n: "))
print(func2(num))
print(list(map(lambda n:n*n, range(1, int(input("Please enter an integer: "))+1))))
| [
"[email protected]"
] | |
7b21a9e794befbf4b2268bb9e211d4e4aff762d6 | 53d03e48ca88e05fb134be8a2d3dda9b6079216e | /fabfile.py | ca82fcc42642e2e6fffd7cd3c79a84a7357a5b66 | [] | no_license | myungseokang/djangogirls | 3b9e18acb7b7747ca04c32dbebb070fc15b88966 | dd7f82131de66e0732553af2b1e3c170a6f44b5f | refs/heads/master | 2021-06-18T09:59:14.143057 | 2017-07-03T00:16:00 | 2017-07-03T00:16:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | true | false | 6,054 | py | from fabric.contrib.files import append, exists, sed, put
from fabric.api import env, local, run, sudo
import random
import os
import json
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Load deploy.json and store its contents in the envs variable.
with open(os.path.join(PROJECT_DIR, "deploy.json")) as f:
envs = json.loads(f.read())
REPO_URL = envs['REPO_URL']
PROJECT_NAME = envs['PROJECT_NAME']
REMOTE_HOST = envs['REMOTE_HOST']
REMOTE_USER = envs['REMOTE_USER']
STATIC_ROOT_NAME = envs['STATIC_ROOT']
STATIC_URL_NAME = envs['STATIC_URL']
MEDIA_ROOT = envs['MEDIA_ROOT']
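# For reference, a deploy.json with the keys read above might look like this
# (all values are hypothetical placeholders):
# {
#     "REPO_URL": "https://github.com/<user>/<project>.git",
#     "PROJECT_NAME": "myproject",
#     "REMOTE_HOST": "203.0.113.10",
#     "REMOTE_USER": "ubuntu",
#     "STATIC_ROOT": "static_root",
#     "STATIC_URL": "static",
#     "MEDIA_ROOT": "media"
# }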
env.user = REMOTE_USER
username = env.user
env.hosts = [
REMOTE_HOST,
]
project_folder = '/home/{}/{}'.format(env.user, PROJECT_NAME)
apt_requirements = [
'ufw',
'curl',
'git',
'python3-dev',
'python3-pip',
'build-essential',
'python3-setuptools',
'apache2',
'libapache2-mod-wsgi-py3',
'libssl-dev',
'libxml2-dev',
'libjpeg8-dev',
'zlib1g-dev',
]
def new_server():
setup()
deploy()
def setup():
_register_ssh_key()
_get_latest_apt()
_install_apt_requirements(apt_requirements)
_make_virtualenv()
def deploy():
_get_latest_source()
_update_settings()
_update_virtualenv()
_update_static_files()
_update_database()
_make_virtualhost()
_grant_apache2()
_grant_sqlite3()
_restart_apache2()
def create_superuser():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py createsuperuser' % (
project_folder, virtualenv_folder
))
def _register_ssh_key():
local("ssh-keyscan -H {} >> {}".format(REMOTE_HOST, os.path.expanduser('~/.ssh/known_hosts')))
def _get_latest_apt():
    update_or_not = input('Would you like to install Apache2/Python3?\n'
'[y/n, default: y]: ')
if update_or_not != 'n':
sudo('sudo apt-get update && sudo apt-get -y upgrade')
def _install_apt_requirements(apt_requirements):
reqs = ''
for req in apt_requirements:
reqs += (' ' + req)
sudo('sudo apt-get -y install {}'.format(reqs))
def _make_virtualenv():
if not exists('~/.virtualenvs'):
script = '''"# python virtualenv settings
export WORKON_HOME=~/.virtualenvs
export VIRTUALENVWRAPPER_PYTHON="$(command \which python3)" # location of python3
source /usr/local/bin/virtualenvwrapper.sh"'''
run('mkdir ~/.virtualenvs')
sudo('sudo pip3 install virtualenv virtualenvwrapper')
run('echo {} >> ~/.bashrc'.format(script))
def _get_latest_source():
if exists(project_folder + '/.git'):
run('cd %s && git fetch' % (project_folder,))
else:
run('git clone %s %s' % (REPO_URL, project_folder))
current_commit = local("git log -n 1 --format=%H", capture=True)
run('cd %s && git reset --hard %s' % (project_folder, current_commit))
def _update_settings():
settings_path = project_folder + '/{}/settings.py'.format(PROJECT_NAME)
sed(settings_path, "DEBUG = True", "DEBUG = False")
sed(settings_path,
'ALLOWED_HOSTS = .+$',
'ALLOWED_HOSTS = ["%s"]' % (REMOTE_HOST,)
)
secret_key_file = project_folder + '/{}/secret_key.py'.format(PROJECT_NAME)
if not exists(secret_key_file):
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
key = ''.join(random.SystemRandom().choice(chars) for _ in range(50))
append(secret_key_file, "SECRET_KEY = '%s'" % (key,))
append(settings_path, '\nfrom .secret_key import SECRET_KEY')
def _update_virtualenv():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
if not exists(virtualenv_folder + '/bin/pip'):
run('cd /home/%s/.virtualenvs && virtualenv %s' % (env.user, PROJECT_NAME))
run('%s/bin/pip install "django<2"' % (
virtualenv_folder
))
def _update_static_files():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py collectstatic --noinput' % (
project_folder, virtualenv_folder
))
def _update_database():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py migrate --noinput' % (
project_folder, virtualenv_folder
))
def _make_virtualhost():
script = """'<VirtualHost *:80>
ServerName {servername}
Alias /{static_url} /home/{username}/{project_name}/{static_root}
Alias /{media_url} /home/{username}/{project_name}/{media_url}
<Directory /home/{username}/{project_name}/{media_url}>
Require all granted
</Directory>
<Directory /home/{username}/{project_name}/{static_root}>
Require all granted
</Directory>
<Directory /home/{username}/{project_name}/{project_name}>
<Files wsgi.py>
Require all granted
</Files>
</Directory>
WSGIDaemonProcess {project_name} python-home=/home/{username}/.virtualenvs/{project_name} python-path=/home/{username}/{project_name}
WSGIProcessGroup {project_name}
WSGIScriptAlias / /home/{username}/{project_name}/{project_name}/wsgi.py
ErrorLog ${{APACHE_LOG_DIR}}/error.log
CustomLog ${{APACHE_LOG_DIR}}/access.log combined
</VirtualHost>'""".format(
static_root=STATIC_ROOT_NAME,
username=env.user,
project_name=PROJECT_NAME,
static_url=STATIC_URL_NAME,
servername=REMOTE_HOST,
media_url=MEDIA_ROOT
)
sudo('echo {} > /etc/apache2/sites-available/{}.conf'.format(script, PROJECT_NAME))
sudo('a2ensite {}.conf'.format(PROJECT_NAME))
def _grant_apache2():
sudo('sudo chown -R :www-data ~/{}'.format(PROJECT_NAME))
def _grant_sqlite3():
sudo('sudo chmod 775 ~/{}/db.sqlite3'.format(PROJECT_NAME))
def _restart_apache2():
sudo('sudo service apache2 restart') | [
"[email protected]"
] | |
38acb8c211006d953999bf2dfc3090c9f9313ea5 | ee27325f6a3e6a2d1f5e004aa60f5974ad864ae9 | /contrib/python/plotly/py3/plotly/validators/contourcarpet/__init__.py | 09c50961c6d1e808ad2e54b12da590314f6b6cc2 | [
"Apache-2.0",
"MIT"
] | permissive | alvinahmadov/catboost | f32d2b16be9db7439e429c88feb5676de842fc89 | a6e0caa4779b31199f535cf43b09879d7c653abe | refs/heads/master | 2023-06-12T19:29:52.028508 | 2023-05-11T18:33:03 | 2023-05-11T18:33:03 | 202,584,937 | 0 | 0 | Apache-2.0 | 2019-08-15T17:35:23 | 2019-08-15T17:35:23 | null | UTF-8 | Python | false | false | 4,621 | py | import sys
from typing import TYPE_CHECKING
if sys.version_info < (3, 7) or TYPE_CHECKING:
from ._zsrc import ZsrcValidator
from ._zmin import ZminValidator
from ._zmid import ZmidValidator
from ._zmax import ZmaxValidator
from ._zauto import ZautoValidator
from ._z import ZValidator
from ._yaxis import YaxisValidator
from ._xaxis import XaxisValidator
from ._visible import VisibleValidator
from ._uirevision import UirevisionValidator
from ._uid import UidValidator
from ._transpose import TransposeValidator
from ._textsrc import TextsrcValidator
from ._text import TextValidator
from ._stream import StreamValidator
from ._showscale import ShowscaleValidator
from ._showlegend import ShowlegendValidator
from ._reversescale import ReversescaleValidator
from ._opacity import OpacityValidator
from ._ncontours import NcontoursValidator
from ._name import NameValidator
from ._metasrc import MetasrcValidator
from ._meta import MetaValidator
from ._line import LineValidator
from ._legendwidth import LegendwidthValidator
from ._legendrank import LegendrankValidator
from ._legendgrouptitle import LegendgrouptitleValidator
from ._legendgroup import LegendgroupValidator
from ._idssrc import IdssrcValidator
from ._ids import IdsValidator
from ._hovertextsrc import HovertextsrcValidator
from ._hovertext import HovertextValidator
from ._fillcolor import FillcolorValidator
from ._db import DbValidator
from ._da import DaValidator
from ._customdatasrc import CustomdatasrcValidator
from ._customdata import CustomdataValidator
from ._contours import ContoursValidator
from ._colorscale import ColorscaleValidator
from ._colorbar import ColorbarValidator
from ._coloraxis import ColoraxisValidator
from ._carpet import CarpetValidator
from ._btype import BtypeValidator
from ._bsrc import BsrcValidator
from ._b0 import B0Validator
from ._b import BValidator
from ._autocontour import AutocontourValidator
from ._autocolorscale import AutocolorscaleValidator
from ._atype import AtypeValidator
from ._asrc import AsrcValidator
from ._a0 import A0Validator
from ._a import AValidator
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__,
[],
[
"._zsrc.ZsrcValidator",
"._zmin.ZminValidator",
"._zmid.ZmidValidator",
"._zmax.ZmaxValidator",
"._zauto.ZautoValidator",
"._z.ZValidator",
"._yaxis.YaxisValidator",
"._xaxis.XaxisValidator",
"._visible.VisibleValidator",
"._uirevision.UirevisionValidator",
"._uid.UidValidator",
"._transpose.TransposeValidator",
"._textsrc.TextsrcValidator",
"._text.TextValidator",
"._stream.StreamValidator",
"._showscale.ShowscaleValidator",
"._showlegend.ShowlegendValidator",
"._reversescale.ReversescaleValidator",
"._opacity.OpacityValidator",
"._ncontours.NcontoursValidator",
"._name.NameValidator",
"._metasrc.MetasrcValidator",
"._meta.MetaValidator",
"._line.LineValidator",
"._legendwidth.LegendwidthValidator",
"._legendrank.LegendrankValidator",
"._legendgrouptitle.LegendgrouptitleValidator",
"._legendgroup.LegendgroupValidator",
"._idssrc.IdssrcValidator",
"._ids.IdsValidator",
"._hovertextsrc.HovertextsrcValidator",
"._hovertext.HovertextValidator",
"._fillcolor.FillcolorValidator",
"._db.DbValidator",
"._da.DaValidator",
"._customdatasrc.CustomdatasrcValidator",
"._customdata.CustomdataValidator",
"._contours.ContoursValidator",
"._colorscale.ColorscaleValidator",
"._colorbar.ColorbarValidator",
"._coloraxis.ColoraxisValidator",
"._carpet.CarpetValidator",
"._btype.BtypeValidator",
"._bsrc.BsrcValidator",
"._b0.B0Validator",
"._b.BValidator",
"._autocontour.AutocontourValidator",
"._autocolorscale.AutocolorscaleValidator",
"._atype.AtypeValidator",
"._asrc.AsrcValidator",
"._a0.A0Validator",
"._a.AValidator",
],
)
| [
"[email protected]"
] | |
c484b176ad74bbf3c3d2c6945058b3f6fa039104 | 1978a9455159b7c2f3286e0ad602652bc5277ffa | /exercises/05_basic_scripts/task_5_2b.py | 942e752a8c38f07e0e2a188e036ef30e8781ecff | [] | no_license | fortredux/py_net_eng | 338fd7a80debbeda55b5915dbfba4f5577279ef0 | 61cf0b2a355d519c58bc9f2b59d7e5d224922890 | refs/heads/master | 2020-12-03T17:32:53.598813 | 2020-04-08T20:55:45 | 2020-04-08T20:55:45 | 231,409,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,047 | py | # -*- coding: utf-8 -*-
'''
Task 5.2b
Rework the script from task 5.2a so that
the network/mask are not requested from the user
but are passed to the script as arguments.
Restriction: all tasks must be done using only the topics covered so far.
'''
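# Example invocation (the address and mask values are hypothetical):
#     python task_5_2b.py 192.168.10.14 24
# argv then holds ['task_5_2b.py', '192.168.10.14', '24']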
from sys import argv
ip = str(argv[1])
mask = int(argv[2])
host_net_lst = ip.split('.')
host_net_lst[3] = '0'
mask_32 = int('1' * mask)
mask_32 = '{:<032}'.format(mask_32)
template = '''
Network:
{0:<8} {1:<8} {2:<8} {3:<8}
{0:08b} {1:08b} {2:08b} {3:08b}
Mask:
/{4:}
{5:<8} {6:<8} {7:<8} {8:<8}
{9:8} {10:8} {11:8} {12:8}
'''
print(template.format(int(host_net_lst[0]), int(host_net_lst[1]), int(host_net_lst[2]), int(host_net_lst[3]), mask,
int(mask_32[0:8], 2), int(mask_32[8:16], 2), int(mask_32[16:24], 2), int(mask_32[24:32], 2),
mask_32[0:8], mask_32[8:16], mask_32[16:24], mask_32[24:32])) | [
"[email protected]"
] | |
fe91480c51ec9d9e11d8cbf4c07c3dbad667f8a4 | f2f21c643d1f5459253989e7cdba85c064cca8ce | /adding_bootstarp/adding_bootstarp/wsgi.py | b02fcd063eb36aa3dc1d03dc3104e13e690ebccf | [] | no_license | NiteshTyagi/django_tutorial | 342decea7532f1efb200b9f45e4123c581aad43f | 3353f0d2907a00f43e1faee2b97abd9af66ca08f | refs/heads/master | 2022-03-05T19:46:50.642154 | 2022-03-01T04:53:14 | 2022-03-01T04:53:14 | 205,629,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | """
WSGI config for adding_bootstarp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'adding_bootstarp.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
079662848033b228ee09c8bb812f1c80e52e4cb0 | 1f68b6f9f55afaa7cb32df262f4fe0864472da05 | /leetcode(多线程,DP,贪心,SQL)/二刷DP与贪心LeetCode/回溯/51. N皇后/solution.py | 761c902fdb433e6e6f0765ec8b75578496b26cb9 | [] | no_license | faker-hong/testOne | 7c4496362cb5495c25c640076102fe0704f8552f | 768edc4a5526c8972fec66c6a71a38c0b24a1451 | refs/heads/master | 2022-12-04T14:47:53.614685 | 2020-10-30T03:17:50 | 2020-10-30T03:17:50 | 196,514,862 | 1 | 0 | null | 2022-11-22T02:43:32 | 2019-07-12T05:35:09 | Python | UTF-8 | Python | false | false | 1,100 | py | class Solution(object):
def solveNQueens(self, n):
"""
:type n: int
:rtype: List[List[str]]
"""
        # check whether the queen being placed conflicts with the queens placed so far
def is_valid(row, col, track):
            # each call works on a different row, so no same-row check is needed
            if col in track:  # same column?
return False
            # check the two diagonals
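            # cells on the same "/" diagonal share row + col, and cells on
            # the same "\" diagonal share row - col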
for k in range(row):
if row + col == k + track[k] or row - col == k - track[k]:
return False
return True
def backtrack(row, track):
if row == n:
res.append(track)
return
for col in range(n):
                if is_valid(row, col, track):  # position is valid; move to the next row
backtrack(row + 1, track + [col])
res = []
backtrack(0, [])
return [['.'*i + 'Q' + '.'*(n-i-1) for i in l] for l in res]
if __name__ == '__main__':
s = Solution()
res = s.solveNQueens(4)
print(res) | [
"[email protected]"
] | |
c280ee3b854a4f6043932dbcd3aa1b31846f9e2c | 47988c4d1e2c07cd2465da204890f481d59dbd4b | /src/tests/ftest/util/command_utils_base.py | e25429c501973d0d6e453644fdfcba2b0cf4b268 | [
"BSD-2-Clause",
"BSD-2-Clause-Patent"
] | permissive | dsikich/daos | 974000a2e9a37c2edc994007f864ab69afe347e3 | 13385f8eb3209dfe9f63772a68a3bb8cadaf2e23 | refs/heads/master | 2022-07-07T05:46:07.074084 | 2022-06-29T13:01:52 | 2022-06-29T13:01:52 | 242,208,796 | 0 | 0 | NOASSERTION | 2021-12-07T21:17:27 | 2020-02-21T18:50:31 | C | UTF-8 | Python | false | false | 26,663 | py | #!/usr/bin/python
"""
(C) Copyright 2020-2022 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
from logging import getLogger
import os
import yaml
from exception_utils import CommandFailure
class BasicParameter():
"""A class for parameters whose values are read from a yaml file."""
def __init__(self, value, default=None, yaml_key=None):
"""Create a BasicParameter object.
Normal use includes assigning this object to an attribute name that
matches the test yaml file key used to assign its value. If the
variable name will conflict with another class attribute, e.g. self.log,
then the `yaml_key` argument can be used to define the test yaml file
key independently of the attribute name.
Args:
value (object): initial value for the parameter
default (object, optional): default value. Defaults to None.
yaml_key (str, optional): the yaml key name to use when finding the
value to assign from the test yaml file. Default is None which
will use the object's variable name as the yaml key.
"""
self._value = value if value is not None else default
self._default = default
self._yaml_key = yaml_key
self.log = getLogger(__name__)
# Flag used to indicate if a parameter value has or has not been updated
self.updated = True
def __str__(self):
"""Convert this BasicParameter into a string.
Returns:
str: the string version of the parameter's value
"""
return str(self.value) if self.value is not None else ""
@property
def value(self):
"""Get the value of this setting.
Returns:
object: value currently assigned to the setting
"""
return self._value
@value.setter
def value(self, item):
"""Set the value of this setting.
Args:
item (object): value to assign for the setting
"""
if item != self._value:
self._value = item
self.updated = True
def get_yaml_value(self, name, test, path):
"""Get the value for the parameter from the test case's yaml file.
Args:
name (str): name of the value in the yaml file
test (Test): avocado Test object to use to read the yaml file
path (str): yaml path where the name is to be found
"""
if self._yaml_key is not None:
# Use the yaml key name instead of the variable name
name = self._yaml_key
if hasattr(test, "config") and test.config is not None:
self.value = test.config.get(name, path, self._default)
else:
self.value = test.params.get(name, path, self._default)
def update(self, value, name=None, append=False):
"""Update the value of the parameter.
Args:
value (object): value to assign
name (str, optional): name of the parameter which, if provided, is
used to display the update. Defaults to None.
append (bool, optional): append/extend/update the current list/dict
with the provided value. Defaults to False - override the
current value.
"""
if append and isinstance(self.value, list):
if isinstance(value, list):
# Add the new list of value to the existing list
self.value.extend(value)
else:
# Add the new value to the existing list
self.value.append(value)
self.updated = True
elif append and isinstance(self.value, dict):
# Update the dictionary with the new key/value pairs
self.value.update(value)
self.updated = True
else:
# Override the current value with the new value
self.value = value
if name is not None:
self.log.debug("Updated param %s => %s", name, self.value)
def update_default(self, value):
"""Update the BasicParameter default value.
Args:
value (object): new default value
"""
self._default = value
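# Illustrative sketch of the usage pattern described in the docstrings above;
# the yaml path and the avocado test object are assumptions for illustration,
# not part of this module:
#
#     block_size = BasicParameter(None, default=4096)
#     block_size.get_yaml_value("block_size", test, "/run/ior/*")
#     print(block_size.value)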
class FormattedParameter(BasicParameter):
# pylint: disable=too-few-public-methods
"""A class for test parameters whose values are read from a yaml file."""
def __init__(self, str_format, default=None, yaml_key=None):
"""Create a FormattedParameter object.
Normal use includes assigning this object to an attribute name that
matches the test yaml file key used to assign its value. If the
variable name will conflict with another class attribute, e.g. self.log,
then the `yaml_key` argument can be used to define the test yaml file
key independently of the attribute name.
Args:
str_format (str): format string used to convert the value into an
command line argument string
default (object): default value for the param
yaml_key (str, optional): alternative yaml key name to use when
assigning the value from a yaml file. Default is None which
will use the object's variable name as the yaml key.
"""
super().__init__(default, default)
self._str_format = str_format
self._yaml_key = yaml_key
def __str__(self):
"""Return a FormattedParameter object as a string.
Returns:
str: if defined, the parameter, otherwise an empty string
"""
parameter = ""
if isinstance(self._default, bool) and self.value:
parameter = self._str_format
elif not isinstance(self._default, bool) and self.value is not None:
if isinstance(self.value, dict):
parameter = " ".join([
self._str_format.format(
"{} \"{}\"".format(key, self.value[key]))
for key in self.value])
elif isinstance(self.value, (list, tuple)):
parameter = " ".join(
[self._str_format.format(value) for value in self.value])
else:
parameter = self._str_format.format(self.value)
return parameter
def get_yaml_value(self, name, test, path):
"""Get the value for the parameter from the test case's yaml file.
Args:
name (str): name of the value in the yaml file - not used
test (Test): avocado Test object to use to read the yaml file
path (str): yaml path where the name is to be found
"""
if self._yaml_key is not None:
# Use the yaml key name instead of the variable name
name = self._yaml_key
return super().get_yaml_value(name, test, path)
class LogParameter(FormattedParameter):
"""A class for a test log file parameter which is read from a yaml file."""
def __init__(self, directory, str_format, default=None):
"""Create a LogParameter object.
Args:
directory (str): fixed location for the log file name specified by
the yaml file
str_format (str): format string used to convert the value into an
command line argument string
default (object): default value for the param
"""
super().__init__(str_format, default)
self._directory = directory
self._add_directory()
def _add_directory(self):
"""Add the directory to the log file name assignment.
The initial value is restricted to just the log file name as the
location (directory) of the file is fixed. This method updates the
initial log file value (just the log file name) to include the directory
and name for the log file.
"""
if isinstance(self.value, str):
name = os.path.basename(self.value)
self.value = os.path.join(self._directory, name)
elif self.value is not None:
self.log.info(
"Warning: '%s' not added to '%s' due to incompatible type: %s",
self._directory, self.value, type(self.value))
def get_yaml_value(self, name, test, path):
"""Get the value for the parameter from the test case's yaml file.
Args:
name (str): name of the value in the yaml file
test (Test): avocado Test object to use to read the yaml file
path (str): yaml path where the name is to be found
"""
super().get_yaml_value(name, test, path)
self._add_directory()
self.log.debug(" Added the directory: %s => %s", name, self.value)
def update(self, value, name=None, append=False):
"""Update the value of the parameter.
Args:
value (object): value to assign
name (str, optional): name of the parameter which, if provided, is
used to display the update. Defaults to None.
append (bool, optional): append/extend/update the current list/dict
with the provided value. Defaults to False - override the
current value.
"""
super().update(value, name, append)
self._add_directory()
self.log.debug(" Added the directory: %s => %s", name, self.value)
class ObjectWithParameters():
"""A class for an object with parameters."""
def __init__(self, namespace):
"""Create a ObjectWithParameters object.
Args:
namespace (str): yaml namespace (path to parameters)
"""
self.namespace = namespace
self.log = getLogger(__name__)
def get_attribute_names(self, attr_type=None):
"""Get a sorted list of the names of the attr_type attributes.
Args:
attr_type(object, optional): A single object type or tuple of
object types used to filter class attributes by their type.
Defaults to None.
Returns:
list: a list of class attribute names used to define parameters
"""
return [
name for name in sorted(self.__dict__.keys())
if attr_type is None or isinstance(getattr(self, name), attr_type)]
def get_param_names(self):
"""Get a sorted list of the names of the BasicParameter attributes.
Note: Override this method to change the order or inclusion of a
command parameter in the get_params() method.
Returns:
list: a list of class attribute names used to define parameters
"""
return self.get_attribute_names(BasicParameter)
def get_params(self, test):
"""Get values for all of the command params from the yaml file.
Sets each BasicParameter object's value to the yaml key that matches
the assigned name of the BasicParameter object in this class. For
example, the self.block_size.value will be set to the value in the yaml
file with the key 'block_size'.
If no key matches are found in the yaml file the BasicParameter object
will be set to its default value.
Args:
test (Test): avocado Test object
"""
for name in self.get_param_names():
getattr(self, name).get_yaml_value(name, test, self.namespace)
def update_params(self, **params):
"""Update each of provided parameter name and value pairs."""
for name, value in params.items():
try:
getattr(self, name).update(value, name)
except AttributeError as error:
raise CommandFailure("Unknown parameter: {}".format(name)) from error
class CommandWithParameters(ObjectWithParameters):
"""A class for command with parameters."""
def __init__(self, namespace, command, path=""):
"""Create a CommandWithParameters object.
Uses Avocado's utils.process module to run a command str provided.
Args:
namespace (str): yaml namespace (path to parameters)
command (str): string of the command to be executed.
path (str, optional): path to location of command binary file.
Defaults to "".
"""
super().__init__(namespace)
self._command = command
self._path = path
self._pre_command = None
@property
def command(self):
"""Get the command without its parameters."""
return self._command
@property
def command_path(self):
"""Get the path used for the command."""
return self._path
def __str__(self):
"""Return the command with all of its defined parameters as a string.
Returns:
str: the command with all the defined parameters
"""
# Join all the parameters that have been assigned a value with the
# command to create the command string
params = []
for name in self.get_str_param_names():
value = str(getattr(self, name))
if value != "":
params.append(value)
# Append the path to the command and prepend it with any other
# specified commands
command_list = [] if self._pre_command is None else [self._pre_command]
command_list.append(os.path.join(self._path, self._command))
# Return the command and its parameters
return " ".join(command_list + params)
def get_str_param_names(self):
"""Get a sorted list of the names of the command attributes.
Returns:
list: a list of class attribute names used to define parameters
for the command.
"""
return self.get_param_names()
class YamlParameters(ObjectWithParameters):
"""A class of parameters used to create a yaml file."""
def __init__(self, namespace, filename=None, title=None, other_params=None):
"""Create a YamlParameters object.
Args:
namespace (str): yaml namespace (path to parameters)
filename (str): the yaml file to generate with the parameters
title (str, optional): namespace under which to place the
parameters when creating the yaml file. Defaults to None.
other_params (YamlParameters, optional): yaml parameters to
include with these yaml parameters. Defaults to None.
"""
super().__init__(namespace)
self.filename = filename
self.title = title
self.other_params = other_params
def get_params(self, test):
"""Get values for the yaml parameters from the test yaml file.
Args:
test (Test): avocado Test object
"""
# Get the values for the yaml parameters defined by this class
super().get_params(test)
# Get the values for the yaml parameters defined by the other class
if self.other_params is not None:
self.other_params.get_params(test)
def get_yaml_data(self):
"""Convert the parameters into a dictionary to use to write a yaml file.
Returns:
dict: a dictionary of parameter name keys and values
"""
if (self.other_params is not None and
hasattr(self.other_params, "get_yaml_data")):
yaml_data = self.other_params.get_yaml_data()
else:
yaml_data = {}
for name in self.get_param_names():
value = getattr(self, name).value
if value is not None:
yaml_data[name] = value
return yaml_data if self.title is None else {self.title: yaml_data}
def is_yaml_data_updated(self):
"""Determine if any of the yaml file parameters have been updated.
Returns:
bool: whether or not a yaml file parameter has been updated
"""
yaml_data_updated = False
if (self.other_params is not None and
hasattr(self.other_params, "is_yaml_data_updated")):
yaml_data_updated = self.other_params.is_yaml_data_updated()
if not yaml_data_updated:
for name in self.get_param_names():
if getattr(self, name).updated:
yaml_data_updated = True
break
return yaml_data_updated
def reset_yaml_data_updated(self):
"""Reset each yaml file parameter updated state to False."""
if (self.other_params is not None and
hasattr(self.other_params, "reset_yaml_data_updated")):
self.other_params.reset_yaml_data_updated()
for name in self.get_param_names():
getattr(self, name).updated = False
def create_yaml(self, filename=None):
"""Create a yaml file from the parameter values.
        A yaml file will only be created if at least one of its parameter
        values has been updated (BasicParameter.updated = True).
Args:
filename (str, optional): the yaml file to generate with the
parameters. Defaults to None, which uses self.filename.
Raises:
CommandFailure: if there is an error creating the yaml file
Returns:
bool: whether or not an updated yaml file was created
"""
create_yaml = self.is_yaml_data_updated()
if create_yaml:
# Write a new yaml file if any of the parameters have been updated
if filename is None:
filename = self.filename
yaml_data = self.get_yaml_data()
self.log.info("Writing yaml configuration file %s", filename)
try:
with open(filename, 'w') as write_file:
yaml.dump(yaml_data, write_file, default_flow_style=False)
except Exception as error:
raise CommandFailure(
"Error writing the yaml file {}: {}".format(
filename, error)) from error
self.reset_yaml_data_updated()
return create_yaml
def set_value(self, name, value):
"""Set the value for a specified attribute name.
Args:
name (str): name of the attribute for which to set the value
value (object): the value to set
Returns:
bool: if the attribute name was found and the value was set
"""
status = False
setting = getattr(self, name, None)
if setting is not None and hasattr(setting, "update"):
setting.update(value, name)
status = True
elif setting is not None:
setattr(self, name, value)
self.log.debug("Updated param %s => %s", name, value)
status = True
elif self.other_params is not None:
status = self.other_params.set_value(name, value)
return status
def get_value(self, name):
"""Get the value of the specified attribute name.
Args:
name (str): name of the attribute from which to get the value
Returns:
object: the object's value referenced by the attribute name
"""
setting = getattr(self, name, None)
if setting is not None and hasattr(setting, "value"):
value = setting.value
elif setting is not None:
value = setting
elif self.other_params is not None:
value = self.other_params.get_value(name)
else:
value = None
return value
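
# Illustrative sketch: subclasses declare BasicParameter attributes, and
# create_yaml() only rewrites the file when something was flagged as updated.
# The namespace, filename, and 'port' attribute are invented; this assumes
# BasicParameter.update() sets the 'updated' flag, as update_params() implies.
class _ExampleConfig(YamlParameters):
    def __init__(self):
        super().__init__("/run/example_config/*", filename="/tmp/example.yaml")
        self.port = BasicParameter(None, 10001)


def _example_yaml_flow(test):
    cfg = _ExampleConfig()
    cfg.get_params(test)          # pull values from the test yaml
    cfg.set_value("port", 10002)  # flags the parameter as updated
    return cfg.create_yaml()      # writes /tmp/example.yaml, returns True
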
class TransportCredentials(YamlParameters):
"""Transport credentials listing certificates for secure communication."""
def __init__(self, namespace, title, log_dir):
"""Initialize a TransportConfig object.
Args:
namespace (str): yaml namespace (path to parameters)
title (str, optional): namespace under which to place the
parameters when creating the yaml file. Defaults to None.
"""
super().__init__(namespace, None, title)
default_insecure = str(os.environ.get("DAOS_INSECURE_MODE", True))
default_insecure = default_insecure.lower() == "true"
self.ca_cert = LogParameter(log_dir, None, "daosCA.crt")
self.allow_insecure = BasicParameter(None, default_insecure)
def get_yaml_data(self):
"""Convert the parameters into a dictionary to use to write a yaml file.
Returns:
dict: a dictionary of parameter name keys and values
"""
yaml_data = super().get_yaml_data()
        # Ensure the allow_insecure flag is always present in the yaml output
if self.title is not None:
yaml_data[self.title]["allow_insecure"] = self.allow_insecure.value
else:
yaml_data["allow_insecure"] = self.allow_insecure.value
return yaml_data
def get_certificate_data(self, name_list):
"""Get certificate data by name_list.
Args:
name_list (list): list of certificate attribute names.
Returns:
data (dict): a dictionary of parameter directory name keys and
value.
"""
data = {}
if not self.allow_insecure.value:
for name in name_list:
value = getattr(self, name).value
if isinstance(value, str):
dir_name, file_name = os.path.split(value)
if dir_name not in data:
data[dir_name] = [file_name]
else:
data[dir_name].append(file_name)
return data
class CommonConfig(YamlParameters):
"""Defines common daos_agent and daos_server configuration file parameters.
Includes:
- the daos system name (name)
- a list of access point nodes (access_points)
- the default port number (port)
- the transport credentials
"""
def __init__(self, name, transport):
"""Initialize a CommonConfig object.
Args:
name (str): default value for the name configuration parameter
transport (TransportCredentials): transport credentials
"""
super().__init__(
"/run/common_config/*", None, None, transport)
# Common configuration parameters
# - name: <str>, e.g. "daos_server"
# Name associated with the DAOS system.
#
# - access_points: <list>, e.g. ["hostname1:10001"]
# Hosts can be specified with or without port, default port below
# assumed if not specified. Defaults to the hostname of this node
# at port 10000 for local testing
#
# - port: <int>, e.g. 10001
        #   Default port number with which to bind the daos_server. This
# will also be used when connecting to access points if the list
# only contains host names.
#
self.name = BasicParameter(None, name)
self.access_points = BasicParameter(None, ["localhost"])
self.port = BasicParameter(None, 10001)
class EnvironmentVariables(dict):
"""Dictionary of environment variable keys and values."""
def copy(self):
"""Return a copy of this object.
Returns:
EnvironmentVariables: a copy of this object
"""
return EnvironmentVariables(self)
def get_list(self):
"""Get a list of environment variable assignments.
Returns:
list: a list of environment variable assignment (key=value) strings
"""
return [
key if value is None else "{}={}".format(key, value)
for key, value in list(self.items())
]
def get_export_str(self, separator=";"):
"""Get the command to export all of the environment variables.
Args:
separator (str, optional): export command separator.
Defaults to ";".
Returns:
str: a string of export commands for each environment variable
"""
export_list = ["export {}".format(export) for export in self.get_list()]
export_str = separator.join(export_list)
if export_str:
export_str = "".join([export_str, separator])
return export_str
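
def _example_env_vars():
    """Sketch of EnvironmentVariables usage: plain dict semantics plus shell
    helpers. The variable names are arbitrary; a value of None emits the key
    without an assignment."""
    env = EnvironmentVariables({"D_LOG_MASK": "INFO", "PATH": None})
    assert env.get_list() == ["D_LOG_MASK=INFO", "PATH"]
    return env.get_export_str()  # -> "export D_LOG_MASK=INFO;export PATH;"
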
class PositionalParameter(BasicParameter):
"""Parameter that defines position.
Used to support positional parameters for dmg and daos.
"""
def __init__(self, position, default=None):
"""Create a PositionalParameter object.
Args:
position (int): argument position/order
default (object, optional): default value for the param. Defaults to
None.
"""
super().__init__(default, default)
self._position = position
@property
def position(self):
"""Position property that defines the position of the parameter."""
return self._position
def __lt__(self, other):
return self.position < other.position
def __gt__(self, other):
return self.position > other.position
def __eq__(self, other):
return self.position == other.position
def __hash__(self):
"""Returns self.position as the hash of the class.
This is used in CommandWithPositionalParameters.get_attribute_names()
where we use this object as the key for a dictionary.
"""
return self.position
class CommandWithPositionalParameters(CommandWithParameters):
"""Command that uses positional parameters.
Used to support positional parameters for dmg and daos.
"""
def get_attribute_names(self, attr_type=None):
"""Get a sorted list of the names of the attr_type attributes.
The list has the ordered positional parameters first, then
non-positional parameters.
Args:
attr_type(object, optional): A single object type or tuple of
object types used to filter class attributes by their type.
Defaults to None.
Returns:
list: a list of class attribute names used to define parameters
"""
positional = {}
non_positional = []
for name in sorted(list(self.__dict__)):
attr = getattr(self, name)
if isinstance(attr, attr_type):
if hasattr(attr, "position"):
positional[attr] = name
else:
non_positional.append(name)
return [positional[key] for key in sorted(positional)] + non_positional
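
def _example_positional_order():
    """Sketch (names invented): positional parameters are emitted in position
    order ahead of keyword parameters. Assumes BasicParameter renders as its
    assigned value when formatted into the command string."""
    cmd = CommandWithPositionalParameters("/run/tool/*", "tool")
    cmd.pool = PositionalParameter(1, "pool_uuid")
    cmd.cont = PositionalParameter(2, "cont_uuid")
    return str(cmd)  # -> "tool pool_uuid cont_uuid"
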
# === from_scratch/detect_metadata/times.py (repo: santokalayil/neural_network, no license) ===
# import platform # to detect the operating system
import os
import time
def get_last_modified_time(path_to_file):
return os.path.getmtime(path_to_file) # time.ctime(os.path.getmtime(path_to_file))
def get_created_time(path_to_file):
return os.path.getctime(path_to_file) # time.ctime(os.path.getctime(path_to_file))
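
# Minimal usage sketch. Note: on Unix, os.path.getctime() reports the inode
# change time rather than true creation time; only Windows reports creation.
if __name__ == "__main__":
    print(time.ctime(get_created_time(__file__)))
    print(time.ctime(get_last_modified_time(__file__)))
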
# === src/generator/Cheetah/ErrorCatchers.py (repo: se210/tracy, license: MIT) ===
#!/usr/bin/env python
# $Id: ErrorCatchers.py,v 1.1 2006-09-06 09:50:08 skyostil Exp $
"""ErrorCatcher class for Cheetah Templates
Meta-Data
================================================================================
Author: Tavis Rudd <[email protected]>
Version: $Revision: 1.1 $
Start Date: 2001/08/01
Last Revision Date: $Date: 2006-09-06 09:50:08 $
"""
__author__ = "Tavis Rudd <[email protected]>"
__revision__ = "$Revision: 1.1 $"[11:-2]
import time
from Cheetah.NameMapper import NotFound
class Error(Exception):
pass
class ErrorCatcher:
_exceptionsToCatch = (NotFound,)
def __init__(self, templateObj):
pass
def exceptions(self):
return self._exceptionsToCatch
def warn(self, exc_val, code, rawCode, lineCol):
return rawCode
## make an alias
Echo = ErrorCatcher
class BigEcho(ErrorCatcher):
def warn(self, exc_val, code, rawCode, lineCol):
return "="*15 + "<" + rawCode + " could not be found>" + "="*15
class KeyError(ErrorCatcher):
def warn(self, exc_val, code, rawCode, lineCol):
raise KeyError("no '%s' in this Template Object's Search List" % rawCode)
class ListErrors(ErrorCatcher):
"""Accumulate a list of errors."""
_timeFormat = "%c"
def __init__(self, templateObj):
ErrorCatcher.__init__(self, templateObj)
self._errors = []
def warn(self, exc_val, code, rawCode, lineCol):
dict = locals().copy()
del dict['self']
dict['time'] = time.strftime(self._timeFormat,
time.localtime(time.time()))
self._errors.append(dict)
return rawCode
def listErrors(self):
"""Return the list of errors."""
return self._errors
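
# Illustrative sketch (not part of the original module): exercising a catcher
# directly. Real usage hands the catcher to a Cheetah Template through its
# errorCatcher setting; the placeholder below is a stand-in.
def _exampleListErrors():
    catcher = ListErrors(None)
    raw = "$missingPlaceholder"
    echoed = catcher.warn(exc_val=None, code=None, rawCode=raw, lineCol=(1, 1))
    return echoed, catcher.listErrors()  # raw code echoed; one error recorded
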
# === distributions/lomax.py (repo: bballamudi/survival, license: MIT) ===
import numpy as np
from scipy.stats import lomax
from distributions.basemodel import *
class Lomax(Base):
'''
We can instantiate a Lomax distribution
(https://en.wikipedia.org/wiki/Lomax_distribution)
with this class.
'''
def __init__(self, k = None, lmb = None, ti = None, xi = None):
'''
Instantiate a Lomax distribution.
args:
k: The shape parameter of the Lomax distribution.
lmb: The scale parameter of the lomax distribution.
ti: The uncensored samples for fitting the distribution.
xi: The censored samples for fitting the distribution.
'''
if ti is not None:
self.train_org = ti
self.train_inorg = xi
self.newtonRh()
else:
self.train = []
self.test = []
self.train_org = []
self.train_inorg = []
self.k = k
self.lmb = lmb
self.params = [self.k, self.lmb]
def determine_params(self, k, lmb, params):
'''
Determines the parameters. Defined in basemodel.py
'''
return super(Lomax, self).determine_params(k, lmb, params)
def pdf(self,t,k=None,lmb=None,params=None):
'''
The probability distribution function (PDF) of the Lomax distribution.
args:
t: The value at which the PDF is to be calculated.
k: The shape parameter of the Lomax distribution.
lmb: The scale parameter of the lomax distribution.
'''
[k,lmb] = self.determine_params(k,lmb,params)
return lmb*k/(1+lmb*t)**(k+1)
def cdf(self,t,k=None,lmb=None,params=None):
'''
        The cumulative distribution function (CDF) of the Lomax distribution:
        the probability that a sample from the distribution falls below a given value.
args:
t: The value at which CDF is to be calculated.
k: The shape parameter of the Lomax.
            lmb: The scale parameter of the Lomax.
params: A 2d array with the shape and scale parameters.
'''
[k,lmb] = self.determine_params(k,lmb,params)
return 1-(1+lmb*t)**-k
def survival(self,t,k=None,lmb=None, params = None):
'''
The survival function for the Lomax distribution.
'''
[k,lmb] = self.determine_params(k,lmb,params)
return (1+lmb*t)**-k
def logpdf(self,t,k,lmb):
'''
The logarithm of the PDF function. Handy for calculating log likelihood.
args:
t: The value at which function is to be calculated.
            k: The shape parameter.
lmb: The scale parameter.
'''
return np.log(k) + np.log(lmb) - (k+1)*np.log(1+lmb*t)
def logsurvival(self,t,k,lmb):
'''
The logarithm of the survival function. Handy for calculating log likelihood.
args:
t: The value at which function is to be calculated.
            k: The shape parameter.
lmb: The scale parameter.
'''
return -k*np.log(1+lmb*t)
def loglik(self,t,x,k=0.5,lmb=0.3):
'''
The logarithm of the likelihood function.
args:
t: The un-censored samples.
x: The censored samples.
            k: The shape parameter.
lmb: The scale parameter.
'''
return sum(self.logpdf(t,k,lmb)) +sum(self.logsurvival(x,k,lmb))
def grad(self,t,x,k=0.5,lmb=0.3):
'''
The gradient of the log-likelihood function.
args:
t: The un-censored samples.
x: The censored samples.
            k: The shape parameter.
lmb: The scale parameter.
'''
n = len(t)
m = len(x)
delk = n/k - sum(np.log(1+lmb*t)) - sum(np.log(1+lmb*x))
dellmb = n/lmb -(k+1) * sum(t/(1+lmb*t)) -k*sum(x/(1+lmb*x))
return np.array([delk,dellmb])
def numerical_grad(self,t,x,k=None,lmb=None):
'''
Calculates the gradient of the log-likelihood function numerically.
args:
t: The survival data.
x: The censored data.
k: The shape parameter.
lmb: The scale parameter.
'''
if k is None or lmb is None:
k = self.k
lmb = self.lmb
eps = 1e-5
delk = (self.loglik(t,x,k+eps,lmb) - self.loglik(t,x,k-eps,lmb))/2/eps
dellmb = (self.loglik(t,x,k,lmb+eps) - self.loglik(t,x,k,lmb-eps))/2/eps
return np.array([delk, dellmb])
def hessian(self,t,x,k=0.5,lmb=0.3):
'''
The hessian of the Loglikelihood function for Lomax.
args:
t: The un-censored samples.
x: The censored samples.
            k: The shape parameter.
lmb: The scale parameter.
'''
n=len(t)
delksq = -n/k**2
dellmbsq = -n/lmb**2 + (k+1)*sum((t/(1+lmb*t))**2) + k*sum((x/(1+lmb*x))**2)
delklmb = -sum(t/(1+lmb*t)) - sum(x/(1+lmb*x))
hess = np.zeros([2,2])
hess[0,0] = delksq
hess[1,1] = dellmbsq
hess[0,1] = hess[1,0] = delklmb
return hess
def numerical_hessian(self,t,x,k=0.5,lmb=0.3):
'''
Calculates the hessian of the log-likelihood function numerically.
args:
t: The survival data.
x: The censored data.
k: The shape parameter.
lmb: The scale parameter.
'''
eps = 1e-4
delksq = (self.loglik(t,x,k+2*eps,lmb) + self.loglik(t,x,k-2*eps,lmb) - 2*self.loglik(t,x,k,lmb))/4/eps/eps
dellmbsq = (self.loglik(t,x,k,lmb+2*eps) + self.loglik(t,x,k,lmb-2*eps) - 2*self.loglik(t,x,k,lmb))/4/eps/eps
dellmbk = (self.loglik(t,x,k+eps,lmb+eps) + self.loglik(t,x,k-eps,lmb-eps)
- self.loglik(t,x,k+eps,lmb-eps) - self.loglik(t,x,k-eps,lmb+eps))/4/eps/eps
hess = np.zeros([2,2])
hess[0,0] = delksq
hess[1,1] = dellmbsq
hess[0,1] = hess[1,0] = dellmbk
return hess
def gradient_descent(self, numIter=2001, params = np.array([.5,.3]), verbose=False):
'''
Performs gradient descent to get the best fitting parameters for
this Lomax given the censored and un-censored data.
args:
numIter: The maximum number of iterations for the iterative method.
params: The initial guess for the shape and scale parameters respectively.
verbose: Set to true for debugging. Shows progress as it fits data.
'''
for i in range(numIter):
lik = self.loglik(self.train_org,self.train_inorg,params[0],params[1])
directn = self.grad(self.train_org,self.train_inorg,params[0],params[1])
params2 = params
for alp1 in [1e-8,1e-7,1e-5,1e-3,1e-2,.1]:
params1 = params + alp1 * directn
if(min(params1) > 0):
lik1 = self.loglik(self.train_org,self.train_inorg,params1[0],params1[1])
if(lik1 > lik and np.isfinite(lik1)):
lik = lik1
params2 = params1
params = params2
if i%100 == 0 and verbose:
print("Iteration " + str(i) + " ,objective function: " + str(lik) + " \nparams = " + str(params) + " \nGradient = " + str(directn))
print("\n########\n")
return params
def newtonRh(self, numIter=101, params = np.array([.1,.1]), verbose=False):
"""
Fits the parameters of a Lomax distribution to data (censored and uncensored).
        Uses the Newton-Raphson method; for an explanation, see: https://www.youtube.com/watch?v=acsSIyDugP0
args:
numIter: The maximum number of iterations for the iterative method.
params: The initial guess for the shape and scale parameters respectively.
verbose: Set to true for debugging. Shows progress as it fits data.
"""
for i in range(numIter):
directn = self.grad(self.train_org,self.train_inorg,params[0],params[1])
if sum(abs(directn)) < 1e-5:
if verbose:
print("\nIt took: " + str(i) + " Iterations.\n Gradients - " + str(directn))
self.params = params
[self.k, self.lmb] = params
return params
lik = self.loglik(self.train_org,self.train_inorg,params[0],params[1])
step = np.linalg.solve(self.hessian(self.train_org,self.train_inorg,params[0],params[1]),directn)
params = params - step
if min(params) < 0:
print("Drastic measures")
params = params + step # undo the effect of taking the step.
params2 = params
for alp1 in [1e-8,1e-7,1e-5,1e-3,1e-2,.1,.5,1.0]:
params1 = params - alp1 * step
if(max(params1) > 0):
lik1 = self.loglik(self.train_org,self.train_inorg,params1[0],params1[1])
if(lik1 > lik and np.isfinite(lik1)):
lik = lik1
params2 = params1
scale = alp1
params = params2
if i % 10 == 0 and verbose:
print("Iteration " + str(i) + " ,objective function: " + str(lik) + " \nparams = " + str(params) + " \nGradient = " + str(directn) + "\n##\n\n")
[self.k, self.lmb] = params
self.params = params
return params
def optimal_wait_threshold(self, intervention_cost, k=None, lmb=None):
'''
Gets the optimal time one should wait for a Lomax recovery before intervention.
args:
intervention_cost: The cost of intervening.
k: The shape parameter of this Lomax distribution.
lmb: The scale parameter of this Lomax distribution.
'''
if k is None or lmb is None:
k = self.k
lmb = self.lmb
return (intervention_cost*k - 1/lmb)
def expectedDT(self,tau,k,lmb,intervention_cost):
'''
The expected downtime incurred when the waiting threshold is set to an arbitrary value.
args:
tau: The value we should set for the intervention threshold.
k: The shape parameter of the current Lomax.
lmb: The scale parameter of the current Lomax.
intervention_cost: The cost of intervening.
'''
return 1/lmb/(k-1) - (1/lmb/(k-1) + tau*k/(k-1))*1/(1+lmb*tau)**k + (tau + intervention_cost)*1/(1+lmb*tau)**k
@staticmethod
def expectedDT_s(tau,k,lmb,intervention_cost):
'''
The expected downtime incurred when the waiting threshold is set to an arbitrary value (static version).
args:
tau: The value we should set for the intervention threshold.
k: The shape parameter of the current Lomax.
lmb: The scale parameter of the current Lomax.
intervention_cost: The cost of intervening.
'''
return 1/lmb/(k-1) - (1/lmb/(k-1) + tau*k/(k-1))*1/(1+lmb*tau)**k + (tau + intervention_cost)*1/(1+lmb*tau)**k
def expectedT(self,tau,k=None,lmb=None,params=None):
'''
The expected value of the Lomax conditional on it being less than tau.
args:
tau: Censor the Lomax here.
k: The shape parameter of the current Lomax.
lmb: The scale parameter of the current Lomax.
params: A 2-d array with shape and scale parameters.
'''
[k,lmb] = self.determine_params(k,lmb,params)
return (1/lmb/(k-1) - (1/lmb/(k-1) + tau*k/(k-1))*1/(1+lmb*tau)**k)/(1-1/(1+lmb*tau)**k)
def samples(self, k=None, lmb=None, size=1000, params=None):
'''
Generates samples for the Lomax distribution.
args:
k: Shape of Lomax.
lmb: Scale of Lomax.
size: The number of simulations to be generated.
params: A 2-d array with shape and scale parameters.
'''
[k, lmb] = self.determine_params(k, lmb, params)
return lomax.rvs(c=k, scale=(1 / lmb), size=size)
@staticmethod
def samples_s(k, lmb, size = 1000):
return lomax.rvs(c=k, scale=(1 / lmb), size=size)
    def kappafn_k(self, t, x, lmb=0.1):
        """Closed-form k that zeroes dL/dk for a given lmb."""
        n = len(t)
        return n/(sum(np.log(1+lmb*t)) + sum(np.log(1+lmb*x)))

    def kappafn_lmb(self, t, x, lmb=0.1):
        """Closed-form k that zeroes dL/dlmb for a given lmb."""
        n = len(t)
        return (n/lmb - sum(t/(1+lmb*t)))/(sum(t/(1+lmb*t)) + sum(x/(1+lmb*x)))

    def bisection_fn(self, lmb=0.1):
        """Root function: the two k estimates coincide at the MLE of lmb."""
        return self.kappafn_k(self.train_org, self.train_inorg, lmb) - self.kappafn_lmb(self.train_org, self.train_inorg, lmb)

    def bisection(self, a=1e-6, b=2000):
        """Bisection search for the lmb root of bisection_fn."""
n=1
while n < 10000:
c=(a+b)/2
if self.bisection_fn(c) == 0 or (b-a)/2 < 1e-6:
return c
n=n+1
if (self.bisection_fn(c) > 0) == (self.bisection_fn(a) > 0):
a=c
else:
b=c
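
def _example_usage():
    """Minimal sketch with arbitrary parameter values; assumes determine_params
    (from basemodel) falls back to the instance's k and lmb when none are
    passed. tau* follows optimal_wait_threshold: c*k - 1/lmb."""
    lx = Lomax(k=1.5, lmb=0.4)
    density = lx.pdf(1.0)                  # f(1)
    surv = lx.survival(1.0)                # P(T > 1)
    tau = lx.optimal_wait_threshold(5.0)   # 5*1.5 - 1/0.4 = 5.0
    return density, surv, tau
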
# === azure-mgmt-web/azure/mgmt/web/models/domain_registration_input.py (repo: JammyBrand82/azure-sdk-for-python, license: Apache-2.0) ===
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class DomainRegistrationInput(Resource):
"""
Domain registration input for validation Api
:param str id: Resource Id
:param str name: Resource Name
:param str location: Resource Location
:param str type: Resource type
:param dict tags: Resource tags
:param str domain_registration_input_name: Name of the domain
:param Contact contact_admin: Admin contact information
:param Contact contact_billing: Billing contact information
:param Contact contact_registrant: Registrant contact information
:param Contact contact_tech: Technical contact information
:param str registration_status: Domain registration status. Possible
values include: 'Active', 'Awaiting', 'Cancelled', 'Confiscated',
'Disabled', 'Excluded', 'Expired', 'Failed', 'Held', 'Locked', 'Parked',
'Pending', 'Reserved', 'Reverted', 'Suspended', 'Transferred',
'Unknown', 'Unlocked', 'Unparked', 'Updated', 'JsonConverterFailed'
:param str provisioning_state: Domain provisioning state. Possible values
include: 'Succeeded', 'Failed', 'Canceled', 'InProgress', 'Deleting'
:param list name_servers: Name servers
:param bool privacy: If true then domain privacy is enabled for this
domain
:param datetime created_time: Domain creation timestamp
:param datetime expiration_time: Domain expiration timestamp
:param datetime last_renewed_time: Timestamp when the domain was renewed
last time
:param bool auto_renew: If true then domain will renewed automatically
:param bool ready_for_dns_record_management: If true then Azure can
assign this domain to Web Apps. This value will be true if domain
registration status is active and it is hosted on name servers Azure has
programmatic access to
:param list managed_host_names: All hostnames derived from the domain and
assigned to Azure resources
:param DomainPurchaseConsent consent: Legal agreement consent
"""
_validation = {
'location': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'domain_registration_input_name': {'key': 'properties.name', 'type': 'str'},
'contact_admin': {'key': 'properties.contactAdmin', 'type': 'Contact'},
'contact_billing': {'key': 'properties.contactBilling', 'type': 'Contact'},
'contact_registrant': {'key': 'properties.contactRegistrant', 'type': 'Contact'},
'contact_tech': {'key': 'properties.contactTech', 'type': 'Contact'},
'registration_status': {'key': 'properties.registrationStatus', 'type': 'DomainStatus'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
'name_servers': {'key': 'properties.nameServers', 'type': '[str]'},
'privacy': {'key': 'properties.privacy', 'type': 'bool'},
'created_time': {'key': 'properties.createdTime', 'type': 'iso-8601'},
'expiration_time': {'key': 'properties.expirationTime', 'type': 'iso-8601'},
'last_renewed_time': {'key': 'properties.lastRenewedTime', 'type': 'iso-8601'},
'auto_renew': {'key': 'properties.autoRenew', 'type': 'bool'},
'ready_for_dns_record_management': {'key': 'properties.readyForDnsRecordManagement', 'type': 'bool'},
'managed_host_names': {'key': 'properties.managedHostNames', 'type': '[HostName]'},
'consent': {'key': 'properties.consent', 'type': 'DomainPurchaseConsent'},
}
def __init__(self, location, id=None, name=None, type=None, tags=None, domain_registration_input_name=None, contact_admin=None, contact_billing=None, contact_registrant=None, contact_tech=None, registration_status=None, provisioning_state=None, name_servers=None, privacy=None, created_time=None, expiration_time=None, last_renewed_time=None, auto_renew=None, ready_for_dns_record_management=None, managed_host_names=None, consent=None, **kwargs):
super(DomainRegistrationInput, self).__init__(id=id, name=name, location=location, type=type, tags=tags, **kwargs)
self.domain_registration_input_name = domain_registration_input_name
self.contact_admin = contact_admin
self.contact_billing = contact_billing
self.contact_registrant = contact_registrant
self.contact_tech = contact_tech
self.registration_status = registration_status
self.provisioning_state = provisioning_state
self.name_servers = name_servers
self.privacy = privacy
self.created_time = created_time
self.expiration_time = expiration_time
self.last_renewed_time = last_renewed_time
self.auto_renew = auto_renew
self.ready_for_dns_record_management = ready_for_dns_record_management
self.managed_host_names = managed_host_names
self.consent = consent
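
# Usage sketch: generated client models are plain data holders. Only
# 'location' is required (see _validation above); the remaining values here
# are placeholders.
def _example_domain_registration_input():
    return DomainRegistrationInput(
        location="global",
        domain_registration_input_name="example.com",
        privacy=True,
        auto_renew=True,
    )
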
# === Pyrado/tests/environment_wrappers/test_action_delay.py (repo: jacarvalho/SimuRLacra, license: BSD-3-Clause) ===
import numpy as np
import pytest
from pyrado.spaces.box import BoxSpace
from pyrado.environment_wrappers.action_delay import ActDelayWrapper
from tests.environment_wrappers.mock_env import MockEnv
@pytest.mark.wrappers
def test_no_delay():
mockenv = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
wenv = ActDelayWrapper(mockenv, delay=0)
# Reset to initialize buffer
wenv.reset()
# Perform some actions
wenv.step(np.array([4, 1]))
assert mockenv.last_act == [4, 1]
wenv.step(np.array([7, 5]))
assert mockenv.last_act == [7, 5]
@pytest.mark.wrappers
def test_act_delay():
mockenv = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
wenv = ActDelayWrapper(mockenv, delay=2)
# Reset to initialize buffer
wenv.reset()
# Perform some actions
wenv.step(np.array([0, 1]))
assert mockenv.last_act == [0, 0]
wenv.step(np.array([2, 4]))
assert mockenv.last_act == [0, 0]
wenv.step(np.array([1, 2]))
assert mockenv.last_act == [0, 1]
wenv.step(np.array([2, 3]))
assert mockenv.last_act == [2, 4]
@pytest.mark.wrappers
def test_reset():
mockenv = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
wenv = ActDelayWrapper(mockenv, delay=1)
# Reset to initialize buffer
wenv.reset()
# Perform some actions
wenv.step(np.array([0, 4]))
assert mockenv.last_act == [0, 0]
wenv.step(np.array([4, 4]))
assert mockenv.last_act == [0, 4]
# The next action would be [4, 4], but now we reset again
wenv.reset()
wenv.step(np.array([1, 2]))
assert mockenv.last_act == [0, 0]
wenv.step(np.array([2, 3]))
assert mockenv.last_act == [1, 2]
@pytest.mark.wrappers
def test_domain_param():
mockenv = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
wenv = ActDelayWrapper(mockenv, delay=1)
# Reset to initialize buffer
wenv.reset()
# Perform some actions
wenv.step(np.array([0, 1]))
assert mockenv.last_act == [0, 0]
wenv.step(np.array([2, 4]))
assert mockenv.last_act == [0, 1]
# change the delay and reset
wenv.domain_param = {'act_delay': 2}
wenv.reset()
wenv.step(np.array([1, 2]))
assert mockenv.last_act == [0, 0]
wenv.step(np.array([2, 3]))
assert mockenv.last_act == [0, 0]
wenv.step(np.array([8, 9]))
assert mockenv.last_act == [1, 2]
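
@pytest.mark.wrappers
def test_domain_param_zero_delay():
    # Sketch of the complementary case, assuming that resetting the delay to 0
    # via domain_param makes actions pass through immediately, mirroring
    # test_no_delay above.
    mockenv = MockEnv(act_space=BoxSpace(-1, 1, shape=(2,)))
    wenv = ActDelayWrapper(mockenv, delay=2)
    wenv.domain_param = {'act_delay': 0}
    wenv.reset()
    wenv.step(np.array([4, 1]))
    assert mockenv.last_act == [4, 1]
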
# === .history/Sudoku_II_007_20180621235112.py (repo: Los4U/sudoku_in_python, no license) ===
from random import randint
import copy
sudoku1 = [
[5, 9, 8, 6, 1, 2, 3, 4, 7],
[2, 1, 7, 9, 3, 4, 8, 6, 5],
[6, 4, 3, 5, 8, 7, 1, 2, 9],
[1, 6, 5, 4, 9, 8, 2, 7, 3],
[3, 2, 9, 7, 6, 5, 4, 1, 8],
[7, 8, 4, 3, 2, 1, 5, 9, 6],
[8, 3, 1, 2, 7, 6, 9, 5, 4],
[4, 7, 2, 8, 5, 9, 6, 3, 1],
[9, 5, 6, 1, 4, 3, 7, " ", " "]
]
sudoku2 = [
[9, 8, 7, 4, 3, 2, 5, 6, 1],
[2, 4, 3, 5, 1, 6, 8, 7, 9],
[5, 6, 1, 7, 9, 8, 4, 3, 2],
[3, 9, 5, 6, 4, 7, 2, 1, 8],
[8, 2, 4, 3, 5, 1, 6, 9, 7],
[1, 7, 6, 2, 8, 9, 3, 4, 5],
[7, 1, 2, 8, 6, 3, 9, 5, 4],
[4, 3, 8, 9, 7, 5, 1, 2, 6],
[' ', 5, ' ', ' ', 2, ' ', 7, ' ', ' ']
]
sudoku3 = [
[9, 8, 7, 4, 3, 2, 5, 6, 1],
[2, 4, 3, 5, 1, 6, 8, 7, 9],
[5, 6, 1, 7, 9, 8, 4, 3, 2],
[3, 9, 5, 6, 4, 7, 2, 1, 8],
[8, 2, 4, 3, 5, 1, 6, 9, 7],
[1, 7, 6, 2, 8, 9, 3, 4, 5],
[7, 1, 2, 8, 6, 3, 9, 5, 4],
[4, 3, 8, 9, 7, 5, 1, 2, 6],
[' ', 5, ' ', ' ', 2, ' ', 7, ' ', ' ']
]
def printSudoku():
i = 0
while i < 10:
if i == 0:
print(" 1 2 3 4 5 6 7 8 9")
print(" -------------------------")
elif i == 3 or i == 6 or i == 9:
print(" -------------------------")
line = "|"
if i < 9:
print(' {2} {1} {0[0]} {0[1]} {0[2]} {1} {0[3]} {0[4]} {0[5]} {1} {0[6]} {0[7]} {0[8]} {1}'.format(sudoku[i], line, i+1))
i = i + 1
print(" ")
print(" %@@@@@@@ @@@ @@@ (@@@@@@@@@ ,@@@@2@@@@@ @@@, /@@@/ @@@, @@@ ")
print(" @@@* @@@ @@@ (@@( /@@@# .@@@% (@@@ @@@, @@@% @@@, @@@. ")
print(" @@@& @@@ @@@ (@@( @@@* @@@% #@@% @@@,.@@@. @@@, @@@. ")
print(" ,@@@@@@* @@@ @@@ (@@( (@@% .@@@* ,@@@ @@@%@@% @@@, @@@. ")
print(" /@@@@@# @@@ @@@ (@@( (@@% .@@@* ,@@@ @@@,@@@( @@@, @@@. ")
print(" *@@@. @@@ .@@& (@@( @@@. @@@% &@@( @@@, &@@@. @@@* .@@@. ")
print(" &, &@@@ #@@@. ,@@@, (@@( ,&@@@* ,@@@& .@@@@ @@@, (@@@/ #@@@* @@@# ")
print(",@@@@@@@@( (@@@@@@@@% (@@@@@@@@@( #@@@@@@@@@, @@@, ,@@@% ,@@@@@@@@@. \n ")
print("To start game input:")
print(" r - to load random puzzle:")
print(" 1 - to load chart nr 1:")
print(" 2 - to load chart nr 2:")
# print(" 3 - to load chart nr 3:")
choice = input("Input here: ")
print("\n\n\n\n")
s = 0
if choice == "R" or choice == "r":
listaSudoku = [sudoku1, sudoku2, sudoku3]
sudoku_number = randint(0, 2)
print("Plansza nr:", sudoku_number)
s = sudoku_number
sudoku = copy.deepcopy(listaSudoku[sudoku_number])
elif int(choice) == 1:
s = 1
sudoku = copy.deepcopy(sudoku1)
elif int(choice) == 2:
s = 2
sudoku = copy.deepcopy(sudoku2)
elif int(choice) == 3:
s = 3
sudoku = copy.deepcopy(sudoku3)
while True: # prints Sudoku until is solved
# print("Your sudoku to solve:")
printSudoku()
print("\nInput 3 numbers in format a b c, np. 4 5 8")
print(" a - row number")
print(" b - column number ")
print(" c - value")
    # print(" r - reset chart to start\n ")
x = input("Input a b c: ")
print("")
numbers = " 0123456789" # conditions of entering the numbers !
if (len(x) != 5) or (str(x[0]) not in numbers) or (str(x[2]) not in numbers) or (
str(x[4]) not in numbers) or (str(x[1]) != " ") or (str(x[3]) != " "):
if x == "r": # reset
if s == 1:
sudoku = copy.deepcopy(sudoku1)
elif s == 2:
sudoku = copy.deepcopy(sudoku2)
elif s == 3:
sudoku = copy.deepcopy(sudoku3)
elif x == "h": # show:
print(sudoku)
print(sudoku1)
else:
print("Error - wrong number format \n ")
continue
else:
sudoku[int(x[0])-1][int(x[2])-1] = int(x[4])
column1 = 0
column2 = 0
try: # check if sudoku is solved
i = 0
        column_sums = []
        while i < 9:  # check that every column sums to 45
            column = 0
            for item in sudoku:
                column = column + item[i]
            column_sums.append(column)
            i += 1
        is45 = 0  # count how many columns sum to 45
        for column_sum in column_sums:
            if column_sum == 45:
                is45 = is45 + 1
#
i = 0
for item in sudoku:
if sum(item) == 45 and is45 == 9:
i = i + 1
if i == 9:
printSudoku()
print(" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
print(" @@@@@@@@@@ YOU WIN @@@@@@@@@@")
print(" @@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
break
except TypeError:
print()
| [
"[email protected]"
] | |
8f4065d632706b252e9aaa5aef0f380f65fed859 | 57c38487a6a689318c960fa7d6b0185f372241bc | /presalytics_ooxml_automation/models/theme_effect_map.py | d5e94240d7206cc938862efbf1be434f6ab396ab | [
"MIT"
] | permissive | presalytics/ooxml-automation-python-client | 2c88bae455b7e567ebdb6a4ea106bbdcd192ac47 | fa6100eef1743e43b4d25b3faac79d39fe32c9d7 | refs/heads/master | 2020-06-05T23:42:32.964361 | 2019-12-27T22:51:40 | 2019-12-27T22:51:40 | 192,575,331 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,626 | py | # coding: utf-8
"""
OOXML Automation
This API helps users convert Excel and Powerpoint documents into rich, live dashboards and stories. # noqa: E501
The version of the OpenAPI document: 0.1.0-no-tags
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from presalytics_ooxml_automation.configuration import Configuration
class ThemeEffectMap(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'theme_id': 'str',
'intensity_id': 'int',
'id': 'str'
}
attribute_map = {
'theme_id': 'themeId',
'intensity_id': 'intensityId',
'id': 'id'
}
def __init__(self, theme_id=None, intensity_id=None, id=None, local_vars_configuration=None): # noqa: E501
"""ThemeEffectMap - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._theme_id = None
self._intensity_id = None
self._id = None
self.discriminator = None
self.theme_id = theme_id
if intensity_id is not None:
self.intensity_id = intensity_id
if id is not None:
self.id = id
@property
def theme_id(self):
"""Gets the theme_id of this ThemeEffectMap. # noqa: E501
:return: The theme_id of this ThemeEffectMap. # noqa: E501
:rtype: str
"""
return self._theme_id
@theme_id.setter
def theme_id(self, theme_id):
"""Sets the theme_id of this ThemeEffectMap.
:param theme_id: The theme_id of this ThemeEffectMap. # noqa: E501
:type: str
"""
self._theme_id = theme_id
@property
def intensity_id(self):
"""Gets the intensity_id of this ThemeEffectMap. # noqa: E501
:return: The intensity_id of this ThemeEffectMap. # noqa: E501
:rtype: int
"""
return self._intensity_id
@intensity_id.setter
def intensity_id(self, intensity_id):
"""Sets the intensity_id of this ThemeEffectMap.
:param intensity_id: The intensity_id of this ThemeEffectMap. # noqa: E501
:type: int
"""
self._intensity_id = intensity_id
@property
def id(self):
"""Gets the id of this ThemeEffectMap. # noqa: E501
:return: The id of this ThemeEffectMap. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ThemeEffectMap.
:param id: The id of this ThemeEffectMap. # noqa: E501
:type: str
"""
self._id = id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ThemeEffectMap):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ThemeEffectMap):
return True
return self.to_dict() != other.to_dict()
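
# Usage sketch: generated models are value objects; to_dict() keys follow the
# attribute names in openapi_types. The ids below are placeholders.
def _example_theme_effect_map():
    m = ThemeEffectMap(theme_id="theme-123", intensity_id=2, id="map-1")
    return m.to_dict()  # {'theme_id': 'theme-123', 'intensity_id': 2, 'id': 'map-1'}
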
# === 1heptane/pdep/network4244_1.py (repo: shenghuiqin/chpd, no license) ===
species(
label = 'C=C([CH]C)C[CH]C(24171)',
structure = SMILES('[CH2]C(=CC)C[CH]C'),
E0 = (230.563,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,3010,987.5,1337.5,450,1655,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,357.285,2038.33],'cm^-1')),
HinderedRotor(inertia=(0.0814701,'amu*angstrom^2'), symmetry=1, barrier=(7.37999,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0814702,'amu*angstrom^2'), symmetry=1, barrier=(7.37999,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0013206,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0814701,'amu*angstrom^2'), symmetry=1, barrier=(7.37998,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.526491,'amu*angstrom^2'), symmetry=1, barrier=(47.6916,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.840817,0.0668233,-3.93337e-05,1.17465e-08,-1.46239e-12,27845.5,29.2386], Tmin=(100,'K'), Tmax=(1751.26,'K')), NASAPolynomial(coeffs=[12.8576,0.0393763,-1.58248e-05,2.79727e-09,-1.84852e-13,23636.5,-35.4691], Tmin=(1751.26,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(230.563,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(RCCJC) + radical(Allyl_P)"""),
)
species(
label = 'C3H6(72)',
structure = SMILES('C=CC'),
E0 = (5.9763,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.497558,'amu*angstrom^2'), symmetry=1, barrier=(11.4398,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (42.0797,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2218.31,'J/mol'), sigma=(4.982,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.31912,0.00817959,3.34736e-05,-4.36194e-08,1.58213e-11,749.325,9.54025], Tmin=(100,'K'), Tmax=(983.754,'K')), NASAPolynomial(coeffs=[5.36755,0.0170743,-6.35108e-06,1.1662e-09,-8.2762e-14,-487.138,-4.54468], Tmin=(983.754,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(5.9763,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(203.705,'J/(mol*K)'), label="""C3H6""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = 'CH3CHCCH2(18175)',
structure = SMILES('C=C=CC'),
E0 = (145.615,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,540,610,2055,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.759584,'amu*angstrom^2'), symmetry=1, barrier=(17.4643,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (54.0904,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2996.71,'J/mol'), sigma=(5.18551,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=468.08 K, Pc=48.77 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.74635,0.0218189,8.22353e-06,-2.14768e-08,8.55624e-12,17563.6,12.7381], Tmin=(100,'K'), Tmax=(1025.6,'K')), NASAPolynomial(coeffs=[6.82078,0.0192338,-7.45622e-06,1.36536e-09,-9.53195e-14,16028,-10.4333], Tmin=(1025.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(145.615,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(228.648,'J/(mol*K)'), label="""CH3CHCCH2""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH2]C1([CH]C)CC1C(24224)',
structure = SMILES('[CH2]C1([CH]C)CC1C'),
E0 = (316.349,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.968205,0.0489647,2.86241e-05,-6.7546e-08,2.77792e-11,38172.7,27.7912], Tmin=(100,'K'), Tmax=(1002.45,'K')), NASAPolynomial(coeffs=[15.0332,0.0350469,-1.37018e-05,2.60034e-09,-1.88281e-13,33232.3,-50.6754], Tmin=(1002.45,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(316.349,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsCs) + group(Cs-CsCsCsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(Cyclopropane) + radical(Cs_S) + radical(Neopentyl)"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = '[CH2]C(C=CC)=CC(24268)',
structure = SMILES('[CH2]C(C=CC)=CC'),
E0 = (135.779,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3100,440,815,1455,1000,2995,3010,3025,975,987.5,1000,1300,1337.5,1375,400,450,500,1630,1655,1680,180],'cm^-1')),
HinderedRotor(inertia=(0.729417,'amu*angstrom^2'), symmetry=1, barrier=(16.7707,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.729494,'amu*angstrom^2'), symmetry=1, barrier=(16.7725,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.729468,'amu*angstrom^2'), symmetry=1, barrier=(16.7719,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.729518,'amu*angstrom^2'), symmetry=1, barrier=(16.7731,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (95.1622,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.465196,0.0665121,-2.88623e-05,-8.26848e-09,7.50332e-12,16467.4,25.1769], Tmin=(100,'K'), Tmax=(1031.76,'K')), NASAPolynomial(coeffs=[14.9174,0.0329066,-1.26059e-05,2.29195e-09,-1.59489e-13,12291.6,-50.7825], Tmin=(1031.76,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(135.779,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(415.724,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H) + radical(Allyl_P)"""),
)
species(
label = 'C=CCC(=C)[CH]C(24175)',
structure = SMILES('[CH2]C(=CC)CC=C'),
E0 = (165.168,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2995,3025,975,1000,1300,1375,400,500,1630,1680,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,180,978.543],'cm^-1')),
HinderedRotor(inertia=(0.075462,'amu*angstrom^2'), symmetry=1, barrier=(1.73502,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.754747,'amu*angstrom^2'), symmetry=1, barrier=(17.3531,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.754909,'amu*angstrom^2'), symmetry=1, barrier=(17.3568,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.75496,'amu*angstrom^2'), symmetry=1, barrier=(17.358,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (95.1622,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.681576,0.0618448,-2.07951e-05,-1.24754e-08,7.90273e-12,19994.1,27.5811], Tmin=(100,'K'), Tmax=(1063.47,'K')), NASAPolynomial(coeffs=[13.5977,0.0351299,-1.39558e-05,2.57106e-09,-1.79378e-13,16010.5,-41.3398], Tmin=(1063.47,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(165.168,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(415.724,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)(Cds-Cds)HH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Allyl_P)"""),
)
species(
label = 'C=[C][CH]C(18176)',
structure = SMILES('[CH2][C]=CC'),
E0 = (361.056,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.352622,'amu*angstrom^2'), symmetry=1, barrier=(8.10748,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.828631,'amu*angstrom^2'), symmetry=1, barrier=(19.0519,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (54.0904,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.42015,0.030446,-1.69076e-05,4.64684e-09,-5.12013e-13,43485.7,14.8304], Tmin=(100,'K'), Tmax=(2065.83,'K')), NASAPolynomial(coeffs=[10.7464,0.014324,-5.20136e-06,8.69079e-10,-5.48385e-14,40045.6,-31.3799], Tmin=(2065.83,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(361.056,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(274.378,'J/(mol*K)'), comment="""Thermo library: DFT_QCI_thermo + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = 'C3H6(T)(143)',
structure = SMILES('[CH2][CH]C'),
E0 = (284.865,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000],'cm^-1')),
HinderedRotor(inertia=(0.238389,'amu*angstrom^2'), symmetry=1, barrier=(5.48103,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00909639,'amu*angstrom^2'), symmetry=1, barrier=(22.1005,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (42.0797,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.93778,0.0190991,4.26842e-06,-1.44873e-08,5.74941e-12,34303.2,12.9695], Tmin=(100,'K'), Tmax=(1046.81,'K')), NASAPolynomial(coeffs=[5.93909,0.0171892,-6.69152e-06,1.21546e-09,-8.39795e-14,33151.2,-4.14888], Tmin=(1046.81,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(284.865,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(199.547,'J/(mol*K)'), label="""C3H6(T)""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH2]C([CH]CC)=CC(24235)',
structure = SMILES('[CH2]C([CH]CC)=CC'),
E0 = (177.229,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.550561,0.0647337,-2.2494e-05,-1.02113e-08,6.70686e-12,21449.3,26.446], Tmin=(100,'K'), Tmax=(1097.32,'K')), NASAPolynomial(coeffs=[13.2783,0.0390642,-1.57369e-05,2.89635e-09,-2.00968e-13,17408.2,-41.8261], Tmin=(1097.32,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(177.229,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_S) + radical(Allyl_P)"""),
)
species(
label = '[CH2]CCC([CH2])=CC(24269)',
structure = SMILES('[CH2]CCC([CH2])=CC'),
E0 = (241.363,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3010,987.5,1337.5,450,1655,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,350,440,435,1725,1972.66,4000],'cm^-1')),
HinderedRotor(inertia=(0.159683,'amu*angstrom^2'), symmetry=1, barrier=(9.37025,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.49658,'amu*angstrom^2'), symmetry=1, barrier=(29.1363,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.159684,'amu*angstrom^2'), symmetry=1, barrier=(9.37009,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.496523,'amu*angstrom^2'), symmetry=1, barrier=(29.1365,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.496531,'amu*angstrom^2'), symmetry=1, barrier=(29.1364,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.187715,0.0727305,-4.74989e-05,1.57475e-08,-2.11977e-12,29175.8,30.6649], Tmin=(100,'K'), Tmax=(1716.14,'K')), NASAPolynomial(coeffs=[17.3493,0.03273,-1.25364e-05,2.16563e-09,-1.41227e-13,23285.4,-61.3985], Tmin=(1716.14,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(241.363,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(RCCJ)"""),
)
species(
label = 'C[CH][CH]C(C)=CC(24270)',
structure = SMILES('C[CH]C=C(C)[CH]C'),
E0 = (167.977,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.82433,0.0581713,-7.43779e-06,-2.31978e-08,1.06492e-11,20327.2,24.7437], Tmin=(100,'K'), Tmax=(1080.15,'K')), NASAPolynomial(coeffs=[12.103,0.0405352,-1.6457e-05,3.05113e-09,-2.1296e-13,16483,-37.0552], Tmin=(1080.15,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(167.977,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_S) + radical(Allyl_S)"""),
)
species(
label = 'C[C]=C(C)C[CH]C(24271)',
structure = SMILES('C[C]=C(C)C[CH]C'),
E0 = (316.905,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,3025,407.5,1350,352.5,2750,2762.5,2775,2787.5,2800,2812.5,2825,2837.5,2850,1350,1380,1410,1440,1470,1500,700,750,800,1000,1050,1100,1350,1375,1400,900,1000,1100,1685,370,350,440,435,1725,226.947,2510.41],'cm^-1')),
HinderedRotor(inertia=(0.00327337,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.202056,'amu*angstrom^2'), symmetry=1, barrier=(7.38475,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.202063,'amu*angstrom^2'), symmetry=1, barrier=(7.38462,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.202085,'amu*angstrom^2'), symmetry=1, barrier=(7.38462,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.202084,'amu*angstrom^2'), symmetry=1, barrier=(7.38457,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.02572,0.0541081,3.51348e-05,-1.85085e-07,1.69623e-10,38174.7,25.4701], Tmin=(100,'K'), Tmax=(419.643,'K')), NASAPolynomial(coeffs=[3.82194,0.0538254,-2.40443e-05,4.55016e-09,-3.16869e-13,37875.6,16.5975], Tmin=(419.643,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(316.905,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(RCCJC) + radical(Cds_S)"""),
)
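# The modes lists above combine a HarmonicOscillator with several one-
# dimensional HinderedRotor(inertia, symmetry, barrier) entries (all with
# semiclassical=False).  As an illustrative sketch only: in the classical
# (high-temperature) limit, a 1-D rotor with the cosine potential
#   V(phi) = (V0/2) * (1 - cos(symmetry*phi))
# has the partition function
#   Q(T) = sqrt(8*pi**3 * I * kB * T) / (symmetry * h) * exp(-x) * I0(x),
#   x = V0 / (2 * kB * T),
# where I0 is the modified Bessel function of order zero.  Arkane's actual
# hindered-rotor treatment may apply quantum or semiclassical corrections,
# so the helper below (our name, not part of the file's schema) is only a
# rough classical-limit check.

import numpy as np

_KB = 1.380649e-23                      # Boltzmann constant, J/K
_H = 6.62607015e-34                     # Planck constant, J*s
_AMU_ANG2 = 1.66053907e-27 * 1e-20      # amu*angstrom^2 -> kg*m^2
_KJMOL = 1e3 / 6.02214076e23            # kJ/mol -> J per molecule

def _hindered_rotor_q(T, inertia_amu_ang2, barrier_kJ_mol, symmetry=1):
    """Classical partition function of one 1-D hindered rotor."""
    I = inertia_amu_ang2 * _AMU_ANG2
    V0 = barrier_kJ_mol * _KJMOL
    x = V0 / (2.0 * _KB * T)
    q_free = np.sqrt(8.0 * np.pi**3 * I * _KB * T) / (symmetry * _H)
    return q_free * np.exp(-x) * np.i0(x)

# Example, the last rotor of species 24271 above:
# _hindered_rotor_q(1000.0, 0.202084, 7.38457)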
species(
label = '[CH2]C(=[C]C)CCC(24272)',
structure = SMILES('[CH2]C(=[C]C)CCC'),
E0 = (273.958,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,1685,370,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3100,440,815,1455,1000,350,440,435,1725,248.252,248.351],'cm^-1')),
HinderedRotor(inertia=(0.00273459,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00273508,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.132651,'amu*angstrom^2'), symmetry=1, barrier=(5.80247,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.295917,'amu*angstrom^2'), symmetry=1, barrier=(12.9452,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.483631,'amu*angstrom^2'), symmetry=1, barrier=(21.1565,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.524652,0.0721738,-4.79101e-05,1.65335e-08,-2.37732e-12,33077.7,28.115], Tmin=(100,'K'), Tmax=(1567.7,'K')), NASAPolynomial(coeffs=[13.7461,0.0384395,-1.56328e-05,2.80758e-09,-1.88477e-13,28932.3,-41.6152], Tmin=(1567.7,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(273.958,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Cds_S)"""),
)
species(
label = 'C=C[C](C)C[CH]C(19167)',
structure = SMILES('C=C[C](C)C[CH]C'),
E0 = (230.593,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,3010,987.5,1337.5,450,1655,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,2950,3100,1380,975,1025,1650,360,370,350,2750,2850,1437.5,1250,1305,750,350,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.892072,0.0600801,-2.42405e-05,-2.45853e-10,1.86411e-12,27852.5,28.7245], Tmin=(100,'K'), Tmax=(1266.31,'K')), NASAPolynomial(coeffs=[10.3481,0.0418611,-1.64597e-05,2.92328e-09,-1.95921e-13,24523.5,-22.8164], Tmin=(1266.31,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(230.593,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Allyl_T) + radical(RCCJC)"""),
)
species(
label = '[CH2][CH]CC(C)=CC(24273)',
structure = SMILES('[CH2][CH]CC(C)=CC'),
E0 = (284.31,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,3010,987.5,1337.5,450,1655,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,237.749,2078.8],'cm^-1')),
HinderedRotor(inertia=(0.00298216,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156111,'amu*angstrom^2'), symmetry=1, barrier=(6.26303,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00298224,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156122,'amu*angstrom^2'), symmetry=1, barrier=(6.26291,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.15611,'amu*angstrom^2'), symmetry=1, barrier=(6.26313,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.30307,0.064992,-4.06959e-05,1.49569e-08,-2.66661e-12,34286.8,29.0938], Tmin=(100,'K'), Tmax=(1129.12,'K')), NASAPolynomial(coeffs=[5.15778,0.0513364,-2.25549e-05,4.246e-09,-2.95091e-13,33416.3,10.029], Tmin=(1129.12,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(284.31,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(RCCJ) + radical(RCCJC)"""),
)
species(
label = '[CH2][C](C=C)CCC(3296)',
structure = SMILES('[CH2]C=C([CH2])CCC'),
E0 = (187.616,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.381084,0.0671752,-2.21408e-05,-1.56763e-08,9.82109e-12,22706,28.4379], Tmin=(100,'K'), Tmax=(1040.54,'K')), NASAPolynomial(coeffs=[14.9511,0.0364389,-1.42653e-05,2.62039e-09,-1.83185e-13,18305.7,-49.0072], Tmin=(1040.54,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(187.616,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Allyl_P)"""),
)
species(
label = 'C[CH]C[C]1CC1C(24274)',
structure = SMILES('C[CH]C[C]1CC1C'),
E0 = (308.738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.11982,0.0560976,-1.86944e-05,-2.77089e-09,2.09126e-12,37242,29.5612], Tmin=(100,'K'), Tmax=(1354.62,'K')), NASAPolynomial(coeffs=[9.83964,0.0424915,-1.70734e-05,3.04844e-09,-2.03921e-13,33765.6,-19.2655], Tmin=(1354.62,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(308.738,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-CsCsCsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(Cyclopropane) + radical(RCCJC) + radical(Tertalkyl)"""),
)
species(
label = '[CH2][C]1CC(C)C1C(24275)',
structure = SMILES('[CH2][C]1CC(C)C1C'),
E0 = (305.913,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.33424,0.0452312,2.59334e-05,-5.66379e-08,2.26382e-11,36900.5,26.8575], Tmin=(100,'K'), Tmax=(999.267,'K')), NASAPolynomial(coeffs=[10.3263,0.0410187,-1.54512e-05,2.8007e-09,-1.95369e-13,33516.7,-24.4571], Tmin=(999.267,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(305.913,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(444.824,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-CsCsCsH) + group(Cs-CsCsCsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(Cyclobutane) + radical(Tertalkyl) + radical(Isobutyl)"""),
)
species(
label = 'CC=CC(C)=CC(24276)',
structure = SMILES('CC=CC(C)=CC'),
E0 = (-15.7206,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.366519,0.0700565,-4.05138e-05,8.23894e-09,5.44902e-13,-1751.78,25.2579], Tmin=(100,'K'), Tmax=(1181.85,'K')), NASAPolynomial(coeffs=[13.897,0.0369024,-1.44776e-05,2.6018e-09,-1.76919e-13,-5832.77,-46.0144], Tmin=(1181.85,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-15.7206,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H)"""),
)
species(
label = 'C=CCC(C)=CC(24277)',
structure = SMILES('C=CCC(C)=CC'),
E0 = (13.6692,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.601563,0.0651294,-3.13621e-05,2.39412e-09,1.73181e-12,1774.23,27.598], Tmin=(100,'K'), Tmax=(1226.09,'K')), NASAPolynomial(coeffs=[12.8372,0.0387397,-1.56269e-05,2.8372e-09,-1.93405e-13,-2242.99,-38.0726], Tmin=(1226.09,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(13.6692,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)(Cds-Cds)HH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-CdsHH)"""),
)
species(
label = 'CH2(S)(23)',
structure = SMILES('[CH2]'),
E0 = (419.862,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
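# Note on the collisionModel entries: TransportData stores Lennard-Jones
# parameters with the well depth epsilon tabulated in J/mol and the
# collision diameter sigma in angstroms.  As a minimal illustrative sketch
# (the helper name is ours, not part of this file's schema), the 12-6 pair
# potential implied by those parameters is:

def _lj_potential(r_angstrom, epsilon_J_mol, sigma_angstrom):
    """Lennard-Jones 12-6 potential, in J/mol, at separation r (angstroms)."""
    x = sigma_angstrom / r_angstrom
    return 4.0 * epsilon_J_mol * (x**12 - x**6)

# Sanity check with the CH2(S) parameters above: the minimum sits at
# r = 2**(1.0/6.0) * 3.8 angstroms with depth -epsilon:
# _lj_potential(2**(1.0/6.0) * 3.8, 1197.29, 3.8)   # -> -1197.29 J/mol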
species(
label = '[CH2]C(=C)C[CH]C(24278)',
structure = SMILES('[CH2]C(=C)C[CH]C'),
E0 = (266.588,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,180,1948.01],'cm^-1')),
HinderedRotor(inertia=(0.0680026,'amu*angstrom^2'), symmetry=1, barrier=(27.1181,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(5.20297,'amu*angstrom^2'), symmetry=1, barrier=(119.627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.278491,'amu*angstrom^2'), symmetry=1, barrier=(6.40305,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.010536,'amu*angstrom^2'), symmetry=1, barrier=(119.627,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (82.1436,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.07974,0.0556446,-3.26018e-05,9.62163e-09,-1.15636e-12,32175.6,25.9137], Tmin=(100,'K'), Tmax=(1886.95,'K')), NASAPolynomial(coeffs=[14.3542,0.0275054,-1.02333e-05,1.71884e-09,-1.09339e-13,27165.9,-46.5569], Tmin=(1886.95,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(266.588,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(365.837,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(RCCJC) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C(C)C(=C)[CH]C(24172)',
structure = SMILES('[CH2]C(=CC)C([CH2])C'),
E0 = (237.411,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,3010,987.5,1337.5,450,1655,1380,1390,370,380,2900,435,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,4000],'cm^-1')),
HinderedRotor(inertia=(0.0358237,'amu*angstrom^2'), symmetry=1, barrier=(17.0825,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(3.92092,'amu*angstrom^2'), symmetry=1, barrier=(90.1497,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.742218,'amu*angstrom^2'), symmetry=1, barrier=(17.065,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.172372,'amu*angstrom^2'), symmetry=1, barrier=(3.96316,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(3.93149,'amu*angstrom^2'), symmetry=1, barrier=(90.3926,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3479.64,'J/mol'), sigma=(6.29859,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=543.51 K, Pc=31.6 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.441935,0.0682285,-3.06307e-05,-5.44961e-09,6.38237e-12,28690.7,29.4733], Tmin=(100,'K'), Tmax=(1022.38,'K')), NASAPolynomial(coeffs=[13.4828,0.0369601,-1.37359e-05,2.43177e-09,-1.6594e-13,24991.8,-38.7786], Tmin=(1022.38,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(237.411,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + longDistanceInteraction_noncyclic(CdCs-ST) + group(Cds-CdsCsH) + radical(Isobutyl) + radical(Allyl_P)"""),
)
species(
label = 'C[CH]CC[C]=CC(19228)',
structure = SMILES('C[CH]CC[C]=CC'),
E0 = (332.18,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3010,987.5,1337.5,450,1655,1685,370,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3025,407.5,1350,352.5,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.26714,0.0653856,-4.37282e-05,1.92202e-08,-4.37429e-12,40046,29.4112], Tmin=(100,'K'), Tmax=(906.265,'K')), NASAPolynomial(coeffs=[4.00506,0.0533011,-2.37267e-05,4.50669e-09,-3.1546e-13,39549.7,16.4718], Tmin=(906.265,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(332.18,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Cds_S) + radical(RCCJC)"""),
)
species(
label = 'CC=C1CC(C)C1(24256)',
structure = SMILES('CC=C1CC(C)C1'),
E0 = (31.5064,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[10.4891,-0.0122365,0.000145495,-1.38082e-07,3.18834e-11,3441.47,-17.0613], Tmin=(100,'K'), Tmax=(1694.81,'K')), NASAPolynomial(coeffs=[70.7806,0.0429634,-7.81564e-05,1.86507e-08,-1.3786e-12,-45359.2,-423.421], Tmin=(1694.81,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(31.5064,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(448.981,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + ring(methylenecyclobutane)"""),
)
species(
label = 'CHCH3(T)(95)',
structure = SMILES('[CH]C'),
E0 = (343.893,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,592.414,4000],'cm^-1')),
HinderedRotor(inertia=(0.00438699,'amu*angstrom^2'), symmetry=1, barrier=(26.7685,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (28.0532,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.82363,-0.000909515,3.2138e-05,-3.7348e-08,1.3309e-11,41371.4,7.10948], Tmin=(100,'K'), Tmax=(960.812,'K')), NASAPolynomial(coeffs=[4.30487,0.00943069,-3.27559e-06,5.95121e-10,-4.27307e-14,40709.1,1.84202], Tmin=(960.812,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(343.893,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(128.874,'J/(mol*K)'), label="""CHCH3(T)""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH2]C([CH2])=CC(24219)',
structure = SMILES('[CH2]C([CH2])=CC'),
E0 = (234.041,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.0177712,'amu*angstrom^2'), symmetry=1, barrier=(20.2255,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.87837,'amu*angstrom^2'), symmetry=1, barrier=(20.1954,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(4.61389,'amu*angstrom^2'), symmetry=1, barrier=(106.082,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (68.117,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.80244,0.0390483,-7.97741e-07,-2.45997e-08,1.16044e-11,28236,18.1778], Tmin=(100,'K'), Tmax=(1004.18,'K')), NASAPolynomial(coeffs=[10.9852,0.023482,-8.93213e-06,1.6381e-09,-1.15442e-13,25332.4,-31.4366], Tmin=(1004.18,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(234.041,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Allyl_P)"""),
)
species(
label = 'CH2(19)',
structure = SMILES('[CH2]'),
E0 = (381.563,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1032.72,2936.3,3459],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.8328,0.000224446,4.68033e-06,-6.04743e-09,2.59009e-12,45920.8,1.40666], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.16229,0.00281798,-7.56235e-07,5.05446e-11,5.65236e-15,46099.1,4.77656], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(381.563,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'C[CH]C[C]=CC(24192)',
structure = SMILES('C[CH]C[C]=CC'),
E0 = (355.96,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,3010,987.5,1337.5,450,1655,1685,370,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3025,407.5,1350,352.5,272.37,2221.18],'cm^-1')),
HinderedRotor(inertia=(0.00227236,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.148422,'amu*angstrom^2'), symmetry=1, barrier=(7.81357,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.148424,'amu*angstrom^2'), symmetry=1, barrier=(7.81357,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.148422,'amu*angstrom^2'), symmetry=1, barrier=(7.81357,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (82.1436,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.25446,0.047533,-2.24803e-05,4.72442e-09,-3.81653e-13,42864.6,23.5426], Tmin=(100,'K'), Tmax=(2781.8,'K')), NASAPolynomial(coeffs=[19.681,0.022476,-8.96952e-06,1.48664e-09,-9.0685e-14,33168.8,-78.3599], Tmin=(2781.8,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(355.96,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(365.837,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Cds_S) + radical(RCCJC)"""),
)
species(
label = 'C=CC(=C)C[CH]C(19164)',
structure = SMILES('C=CC(=C)C[CH]C'),
E0 = (204.351,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,3010,987.5,1337.5,450,1655,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,325.967,325.97,325.971],'cm^-1')),
HinderedRotor(inertia=(0.0234883,'amu*angstrom^2'), symmetry=1, barrier=(1.77106,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0234892,'amu*angstrom^2'), symmetry=1, barrier=(1.77113,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.194384,'amu*angstrom^2'), symmetry=1, barrier=(14.6567,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.19437,'amu*angstrom^2'), symmetry=1, barrier=(14.6566,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (95.1622,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.607111,0.0653555,-3.42158e-05,2.15645e-09,2.88851e-12,24707.8,28.6688], Tmin=(100,'K'), Tmax=(1085.78,'K')), NASAPolynomial(coeffs=[13.0148,0.0349811,-1.34394e-05,2.40786e-09,-1.64502e-13,21109.4,-36.3749], Tmin=(1085.78,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(204.351,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(415.724,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(RCCJC)"""),
)
species(
label = '[CH2]CC(=C)C[CH]C(24279)',
structure = SMILES('[CH2]CC(=C)C[CH]C'),
E0 = (297.69,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,350,440,435,1725,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3000,3100,440,815,1455,1000,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.924397,0.0658228,-3.85648e-05,1.14822e-08,-1.42991e-12,35915.3,31.5477], Tmin=(100,'K'), Tmax=(1740.2,'K')), NASAPolynomial(coeffs=[12.3144,0.039642,-1.59979e-05,2.8369e-09,-1.87928e-13,31951,-29.7127], Tmin=(1740.2,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(297.69,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(RCCJ) + radical(RCCJC)"""),
)
species(
label = 'C=C([CH][CH]C)CC(24280)',
structure = SMILES('[CH2]C(=C[CH]C)CC'),
E0 = (178.364,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.6455,0.0607146,-7.39766e-06,-2.83229e-08,1.36484e-11,21584.3,26.7698], Tmin=(100,'K'), Tmax=(1035.37,'K')), NASAPolynomial(coeffs=[13.8717,0.037755,-1.48994e-05,2.75541e-09,-1.93573e-13,17337.3,-44.7818], Tmin=(1035.37,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(178.364,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Allyl_S)"""),
)
species(
label = '[CH]=C(CC)C[CH]C(24281)',
structure = SMILES('[CH]=C(CC)C[CH]C'),
E0 = (339.54,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3120,650,792.5,1650,350,440,435,1725,287.341,1535.51],'cm^-1')),
HinderedRotor(inertia=(0.0020416,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00203636,'amu*angstrom^2'), symmetry=1, barrier=(0.119657,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153541,'amu*angstrom^2'), symmetry=1, barrier=(9.05865,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.154017,'amu*angstrom^2'), symmetry=1, barrier=(9.06246,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.155556,'amu*angstrom^2'), symmetry=1, barrier=(9.07373,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.883161,0.0678438,-4.23764e-05,1.39494e-08,-1.95975e-12,40949.3,30.3095], Tmin=(100,'K'), Tmax=(1550.14,'K')), NASAPolynomial(coeffs=[11.0007,0.0417364,-1.71136e-05,3.08472e-09,-2.07548e-13,37812.6,-22.9371], Tmin=(1550.14,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(339.54,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(Cds_P) + radical(RCCJC)"""),
)
species(
label = '[CH]=C([CH]C)CCC(24282)',
structure = SMILES('[CH]C(=CC)CCC'),
E0 = (255.302,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3010,987.5,1337.5,450,1655,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,350,440,435,1725,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.197486,0.0737448,-4.23702e-05,1.19747e-08,-1.37166e-12,30850.9,29.6804], Tmin=(100,'K'), Tmax=(1964.11,'K')), NASAPolynomial(coeffs=[18.3382,0.0368005,-1.41557e-05,2.39799e-09,-1.52706e-13,23724.9,-70.0837], Tmin=(1964.11,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(255.302,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH2][CH]CC(=C)CC(24283)',
structure = SMILES('[CH2][CH]CC(=C)CC'),
E0 = (297.69,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,350,440,435,1725,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3000,3100,440,815,1455,1000,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.924397,0.0658228,-3.85648e-05,1.14822e-08,-1.42991e-12,35915.3,31.5477], Tmin=(100,'K'), Tmax=(1740.2,'K')), NASAPolynomial(coeffs=[12.3144,0.039642,-1.59979e-05,2.8369e-09,-1.87928e-13,31951,-29.7127], Tmin=(1740.2,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(297.69,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(RCCJ) + radical(RCCJC)"""),
)
species(
label = 'C[CH][C]1CC(C)C1(24284)',
structure = SMILES('C[CH][C]1CC(C)C1'),
E0 = (304.414,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.43715,0.0453823,1.58051e-05,-3.8158e-08,1.38124e-11,36713.9,28.245], Tmin=(100,'K'), Tmax=(1112.49,'K')), NASAPolynomial(coeffs=[8.60969,0.045401,-1.90173e-05,3.56204e-09,-2.4894e-13,33521,-14.3007], Tmin=(1112.49,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(304.414,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(444.824,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-CsCsCsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(Cyclobutane) + radical(Cs_S) + radical(Tertalkyl)"""),
)
species(
label = 'C=C(C=CC)CC(24285)',
structure = SMILES('C=C(C=CC)CC'),
E0 = (-2.34046,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.471798,0.065208,-1.89932e-05,-1.92017e-08,1.1352e-11,-143.675,25.9787], Tmin=(100,'K'), Tmax=(1018.92,'K')), NASAPolynomial(coeffs=[14.8445,0.0350786,-1.33469e-05,2.43062e-09,-1.69787e-13,-4437.52,-50.3286], Tmin=(1018.92,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-2.34046,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH)"""),
)
species(
label = 'C=CCC(=C)CC(24286)',
structure = SMILES('C=CCC(=C)CC'),
E0 = (27.0493,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.690396,0.0605209,-1.08892e-05,-2.34066e-08,1.17276e-11,3382.97,28.3746], Tmin=(100,'K'), Tmax=(1044.52,'K')), NASAPolynomial(coeffs=[13.4765,0.0373792,-1.47395e-05,2.7195e-09,-1.90466e-13,-696.74,-40.6109], Tmin=(1044.52,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(27.0493,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)(Cds-Cds)HH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + group(Cds-CdsHH)"""),
)
species(
label = 'C=CC(=C)CCC(3302)',
structure = SMILES('C=CC(=C)CCC'),
E0 = (9.90489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.429571,0.064938,-1.42316e-05,-2.68111e-08,1.46756e-11,1331.8,26.5681], Tmin=(100,'K'), Tmax=(999.682,'K')), NASAPolynomial(coeffs=[15.8208,0.033571,-1.2507e-05,2.27564e-09,-1.59971e-13,-3255.39,-55.2328], Tmin=(999.682,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(9.90489,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH)"""),
)
species(
label = 'C=[C]C(C)C[CH]C(19169)',
structure = SMILES('C=[C]C(C)C[CH]C'),
E0 = (336.454,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,2750,2850,1437.5,1250,1305,750,350,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.0597,0.0647849,-3.73125e-05,1.09539e-08,-1.35798e-12,40570.8,30.3163], Tmin=(100,'K'), Tmax=(1715.74,'K')), NASAPolynomial(coeffs=[11.094,0.0413914,-1.68606e-05,3.00709e-09,-2.00057e-13,37127.5,-23.5102], Tmin=(1715.74,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(336.454,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(436.51,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(RCCJC) + radical(Cds_S)"""),
)
species(
label = 'C=C1CC(C)C1C(24267)',
structure = SMILES('C=C1CC(C)C1C'),
E0 = (35.7798,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (96.1702,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[9.99122,-0.0100132,0.000144281,-1.38728e-07,3.23306e-11,3980.95,-16.4526], Tmin=(100,'K'), Tmax=(1679.8,'K')), NASAPolynomial(coeffs=[69.3651,0.0437352,-7.79586e-05,1.86251e-08,-1.37959e-12,-43496.9,-415.639], Tmin=(1679.8,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(35.7798,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(448.981,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + ring(methylenecyclobutane)"""),
)
species(
label = 'C=[C]C[CH]C(2608)',
structure = SMILES('C=[C]C[CH]C'),
E0 = (391.986,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2950,3100,1380,975,1025,1650,1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,328.839,1764.65],'cm^-1')),
HinderedRotor(inertia=(0.070318,'amu*angstrom^2'), symmetry=1, barrier=(5.38273,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0701107,'amu*angstrom^2'), symmetry=1, barrier=(5.38037,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0702146,'amu*angstrom^2'), symmetry=1, barrier=(5.38499,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (68.117,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.41566,0.0369675,-1.69643e-05,3.43217e-09,-2.57685e-13,47199.8,21.2179], Tmin=(100,'K'), Tmax=(2427.98,'K')), NASAPolynomial(coeffs=[16.5625,0.0166401,-6.24654e-06,9.9465e-10,-5.87348e-14,39452.1,-61.39], Tmin=(2427.98,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(391.986,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(RCCJC) + radical(Cds_S)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
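# Every thermo entry in this file is a pair of 7-coefficient NASA
# polynomials, each valid on its own (Tmin, Tmax) interval.  In the
# standard NASA form the coefficients [a1..a5, a6, a7] give
#   Cp/R   = a1 + a2*T + a3*T**2 + a4*T**3 + a5*T**4
#   H/(RT) = a1 + a2*T/2 + a3*T**2/3 + a4*T**3/4 + a5*T**4/5 + a6/T
#   S/R    = a1*ln(T) + a2*T + a3*T**2/2 + a4*T**3/3 + a5*T**4/4 + a7
# The helpers below are a minimal sketch of evaluating one polynomial
# (the names are ours, not part of this generated file's schema); pick the
# polynomial whose temperature range brackets T.

_R = 8.314462618  # gas constant, J/(mol*K)

def _nasa_cp(T, coeffs):
    """Heat capacity Cp, J/(mol*K), from one 7-coefficient NASA polynomial."""
    a1, a2, a3, a4, a5, _a6, _a7 = coeffs
    return _R * (a1 + a2*T + a3*T**2 + a4*T**3 + a5*T**4)

def _nasa_h(T, coeffs):
    """Enthalpy H, J/mol; a6 is the integration constant for H."""
    a1, a2, a3, a4, a5, a6, _a7 = coeffs
    return _R * T * (a1 + a2*T/2 + a3*T**2/3 + a4*T**3/4 + a5*T**4/5 + a6/T)

# Example with the low-temperature polynomial of N2 above:
# _nasa_cp(500.0, [3.61263, -0.00100893, 2.49898e-06, -1.43376e-09,
#                  2.58636e-13, -1051.1, 2.6527])   # ~29.7 J/(mol*K)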
transitionState(
label = 'TS1',
E0 = (230.563,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (316.349,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (368.728,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (379.471,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (379.499,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (462.507,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (355.665,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (393.242,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (433.138,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (478.826,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (318.267,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (380.38,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (338.493,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (308.594,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (645.922,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (461.779,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (355.664,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (293.963,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (255.536,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (686.45,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (397.346,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS22',
E0 = (502.218,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS23',
E0 = (238.847,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS24',
E0 = (577.934,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS25',
E0 = (737.523,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS26',
E0 = (416.143,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS27',
E0 = (411.495,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS28',
E0 = (439.763,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS29',
E0 = (484.725,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS30',
E0 = (299.61,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS31',
E0 = (344.133,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS32',
E0 = (355.664,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS33',
E0 = (293.963,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS34',
E0 = (238.931,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS35',
E0 = (238.931,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS36',
E0 = (430.928,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS37',
E0 = (238.847,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS38',
E0 = (735.879,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
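# Every elementary step below carries kinetics = Arrhenius(A, n, Ea, T0),
# i.e. the modified Arrhenius form
#   k(T) = A * (T/T0)**n * exp(-Ea / (R*T)),
# with the units of k set by the units of A (s^-1 for unimolecular steps,
# cm^3/(mol*s) or m^3/(mol*s) for bimolecular ones).  A minimal sketch,
# using a helper name of our own that is not part of this file's schema:

import math

def _arrhenius_k(T, A, n, Ea_kJ_mol, T0=1.0):
    """Modified Arrhenius rate coefficient at T (K), in the units of A."""
    R = 8.314462618e-3  # gas constant, kJ/(mol*K)
    return A * (T / T0) ** n * math.exp(-Ea_kJ_mol / (R * T))

# Example, reaction1 below (barrierless scission, Ea = 0):
# _arrhenius_k(1000.0, 5e12, 0.0, 0.0)   # -> 5e12 s^-1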
reaction(
label = 'reaction1',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C3H6(72)', 'CH3CHCCH2(18175)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['[CH2]C1([CH]C)CC1C(24224)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(3.36329e+10,'s^-1'), n=0.535608, Ea=(85.7861,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R4_S_D;doublebond_intra;radadd_intra_csHNd] + [R4_S_D;doublebond_intra_HNd;radadd_intra_cs] for rate rule [R4_S_D;doublebond_intra_HNd;radadd_intra_csHNd]
Euclidian distance = 1.0
family: Intra_R_Add_Exocyclic
Ea raised from 83.4 to 85.8 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction3',
reactants = ['H(3)', '[CH2]C(C=CC)=CC(24268)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(0.0272924,'m^3/(mol*s)'), n=2.81111, Ea=(21.1569,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""From training reaction 26 used for Cds-CdH_Cds-CsH;HJ
Exact match found for rate rule [Cds-CdH_Cds-CsH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction4',
reactants = ['H(3)', 'C=CCC(=C)[CH]C(24175)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(3.36e+08,'cm^3/(mol*s)'), n=1.56, Ea=(2.5104,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 9 used for Cds-HH_Cds-CsH;HJ
Exact match found for rate rule [Cds-HH_Cds-CsH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['C3H6(72)', 'C=[C][CH]C(18176)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(0.00620445,'m^3/(mol*s)'), n=2.46568, Ea=(12.4666,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cds-HH_Cds-Cs\H3/H;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['C3H6(T)(143)', 'CH3CHCCH2(18175)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(0.00086947,'m^3/(mol*s)'), n=2.67356, Ea=(32.0272,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Ca_Cds-HH;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['[CH2]C([CH]CC)=CC(24235)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(1.682e+10,'s^-1'), n=0.35, Ea=(125.102,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 160 used for R2H_S;C_rad_out_H/NonDeC;Cs_H_out_H/Cd
Exact match found for rate rule [R2H_S;C_rad_out_H/NonDeC;Cs_H_out_H/Cd]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction8',
reactants = ['[CH2]CCC([CH2])=CC(24269)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(718000,'s^-1'), n=2.05, Ea=(151.879,'kJ/mol'), T0=(1,'K'), Tmin=(500,'K'), Tmax=(2000,'K'), comment="""From training reaction 147 used for R2H_S;C_rad_out_2H;Cs_H_out_H/NonDeC
Exact match found for rate rule [R2H_S;C_rad_out_2H;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction9',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C[CH][CH]C(C)=CC(24270)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(1.09894e+08,'s^-1'), n=1.58167, Ea=(202.575,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS_2Cd;C_rad_out_2H;XH_out]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['C[C]=C(C)C[CH]C(24271)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(7.74e+09,'s^-1'), n=1.08, Ea=(161.921,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 198 used for R3H_DS;Cd_rad_out_Cs;Cs_H_out_2H
Exact match found for rate rule [R3H_DS;Cd_rad_out_Cs;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['[CH2]C(=[C]C)CCC(24272)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(74200,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_DSS;Cd_rad_out_single;Cs_H_out_1H] for rate rule [R4H_DSS;Cd_rad_out_Cs;Cs_H_out_H/NonDeC]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['C=C[C](C)C[CH]C(19167)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(800000,'s^-1'), n=1.81, Ea=(149.787,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 101 used for R4H_SDS;C_rad_out_2H;Cs_H_out_2H
Exact match found for rate rule [R4H_SDS;C_rad_out_2H;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction13',
reactants = ['[CH2][CH]CC(C)=CC(24273)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(91273.5,'s^-1'), n=1.79, Ea=(54.1828,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5Hall;C_rad_out_2H;Cs_H_out_2H] for rate rule [R5HJ_1;C_rad_out_2H;Cs_H_out_2H]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['[CH2][C](C=C)CCC(3296)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(634768,'s^-1'), n=1.77, Ea=(78.0316,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_SSMS;C_rad_out_single;Cs_H_out_2H] for rate rule [R5H_SSMS;C_rad_out_H/NonDeC;Cs_H_out_2H]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction15',
reactants = ['C3H6(T)(143)', 'C=[C][CH]C(18176)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(7.46075e+06,'m^3/(mol*s)'), n=0.027223, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction16',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C[CH]C[C]1CC1C(24274)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(3.473e+12,'s^-1'), n=0.247, Ea=(231.216,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3_D;doublebond_intra_secNd;radadd_intra_cs] for rate rule [R3_D;doublebond_intra_secNd_HNd;radadd_intra_cs2H]
Euclidian distance = 1.41421356237
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction17',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['[CH2][C]1CC(C)C1C(24275)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(5.25757e+07,'s^-1'), n=1.165, Ea=(125.102,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R4_Cs_HH_D;doublebond_intra;radadd_intra_csHCs]
Euclidian distance = 0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction18',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['CC=CC(C)=CC(24276)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(1.4874e+09,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad_NDe]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction19',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C=CCC(C)=CC(24277)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(6.37831e+09,'s^-1'), n=0.137, Ea=(24.9733,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad] for rate rule [R5radEndo;Y_rad;XH_Rrad]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction20',
reactants = ['CH2(S)(23)', '[CH2]C(=C)C[CH]C(24278)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(7.94e+13,'cm^3/(mol*s)','*|/',0.25), n=-0.324, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 4 used for carbene;Cd_pri
Exact match found for rate rule [carbene;Cd_pri]
Euclidian distance = 0
Multiplied by reaction path degeneracy 4.0
family: 1,2_Insertion_carbene
Ea raised from -3.9 to 0 kJ/mol."""),
)
reaction(
label = 'reaction21',
reactants = ['[CH2]C(C)C(=C)[CH]C(24172)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(6.55606e+10,'s^-1'), n=0.64, Ea=(159.935,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCs(-HC)CJ;CsJ;C] for rate rule [cCs(-HC)CJ;CsJ-HH;C]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction22',
reactants = ['C[CH]CC[C]=CC(19228)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(1.74842e+09,'s^-1'), n=1.084, Ea=(170.038,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [cCsCJ;CdsJ;C] + [cCs(-HH)CJ;CJ;C] for rate rule [cCs(-HH)CJ;CdsJ;C]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction23',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['CC=C1CC(C)C1(24256)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), Tmin=(600,'K'), Tmax=(2000,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_single;Cpri_rad_out_2H] for rate rule [R4_SSS;C_rad_out_H/NonDeC;Cpri_rad_out_2H]
Euclidian distance = 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction24',
reactants = ['CHCH3(T)(95)', '[CH2]C([CH2])=CC(24219)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(2.13464e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [C_rad/H2/Cd;Birad]
Euclidian distance = 3.0
Multiplied by reaction path degeneracy 2.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction25',
reactants = ['CH2(19)', 'C[CH]C[C]=CC(24192)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(1.06732e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [Cd_rad/NonDe;Birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction26',
reactants = ['H(3)', 'C=CC(=C)C[CH]C(19164)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS26',
kinetics = Arrhenius(A=(2.31e+08,'cm^3/(mol*s)'), n=1.64, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 2544 used for Cds-HH_Cds-CdH;HJ
Exact match found for rate rule [Cds-HH_Cds-CdH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond
Ea raised from -2.0 to 0 kJ/mol."""),
)
reaction(
label = 'reaction27',
reactants = ['[CH2]CC(=C)C[CH]C(24279)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS27',
kinetics = Arrhenius(A=(1.72e+06,'s^-1'), n=1.99, Ea=(113.805,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 84 used for R2H_S;C_rad_out_2H;Cs_H_out_H/Cd
Exact match found for rate rule [R2H_S;C_rad_out_2H;Cs_H_out_H/Cd]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction28',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C=C([CH][CH]C)CC(24280)'],
transitionState = 'TS28',
kinetics = Arrhenius(A=(1.23617e+10,'s^-1'), n=1.04667, Ea=(209.2,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS_2Cd;C_rad_out_H/NonDeC;XH_out]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction29',
reactants = ['[CH]=C(CC)C[CH]C(24281)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS29',
kinetics = Arrhenius(A=(1.846e+10,'s^-1'), n=0.74, Ea=(145.185,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 194 used for R3H_DS;Cd_rad_out_singleH;Cs_H_out_H/NonDeC
Exact match found for rate rule [R3H_DS;Cd_rad_out_singleH;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction30',
reactants = ['[CH]=C([CH]C)CCC(24282)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS30',
kinetics = Arrhenius(A=(74200,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_DSS;Cd_rad_out_singleH;Cs_H_out_1H] for rate rule [R4H_DSS;Cd_rad_out_singleH;Cs_H_out_H/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction31',
reactants = ['[CH2][CH]CC(=C)CC(24283)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS31',
kinetics = Arrhenius(A=(262000,'s^-1'), n=1.62, Ea=(46.4424,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5Hall;C_rad_out_2H;Cs_H_out_H/NonDeC] for rate rule [R5HJ_1;C_rad_out_2H;Cs_H_out_H/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction32',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C[CH][C]1CC(C)C1(24284)'],
transitionState = 'TS32',
kinetics = Arrhenius(A=(5.25757e+07,'s^-1'), n=1.165, Ea=(125.102,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R4_Cs_HH_D;doublebond_intra;radadd_intra_csHCs]
Euclidian distance = 0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction33',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C=C(C=CC)CC(24285)'],
transitionState = 'TS33',
kinetics = Arrhenius(A=(1.4874e+09,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 1 used for R3radExo;Y_rad_NDe;XH_Rrad_NDe
Exact match found for rate rule [R3radExo;Y_rad_NDe;XH_Rrad_NDe]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction34',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C=CCC(=C)CC(24286)'],
transitionState = 'TS34',
kinetics = Arrhenius(A=(9.63e+09,'s^-1'), n=0.137, Ea=(8.368,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5;Y_rad_NDe;XH_Rrad] for rate rule [R5radEndo;Y_rad_NDe;XH_Rrad]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction35',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C=CC(=C)CCC(3302)'],
transitionState = 'TS35',
kinetics = Arrhenius(A=(9.63e+09,'s^-1'), n=0.137, Ea=(8.368,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5;Y_rad_NDe;XH_Rrad] for rate rule [R5radEndo;Y_rad_NDe;XH_Rrad]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction36',
reactants = ['C=[C]C(C)C[CH]C(19169)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS36',
kinetics = Arrhenius(A=(8.66e+11,'s^-1'), n=0.438, Ea=(94.4747,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 5 used for cCs(-HC)CJ;CdsJ;C
Exact match found for rate rule [cCs(-HC)CJ;CdsJ;C]
Euclidian distance = 0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction37',
reactants = ['C=C([CH]C)C[CH]C(24171)'],
products = ['C=C1CC(C)C1C(24267)'],
transitionState = 'TS37',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_single;Cpri_rad_out_single] for rate rule [R4_SSS;C_rad_out_H/NonDeC;Cpri_rad_out_H/NonDeC]
Euclidian distance = 2.82842712475
family: Birad_recombination"""),
)
reaction(
label = 'reaction38',
reactants = ['CHCH3(T)(95)', 'C=[C]C[CH]C(2608)'],
products = ['C=C([CH]C)C[CH]C(24171)'],
transitionState = 'TS38',
kinetics = Arrhenius(A=(1.06732e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [Cd_rad/NonDe;Birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
network(
label = '4244',
isomers = [
'C=C([CH]C)C[CH]C(24171)',
],
reactants = [
('C3H6(72)', 'CH3CHCCH2(18175)'),
],
bathGas = {
'N2': 0.5,
'Ne': 0.5,
},
)
pressureDependence(
label = '4244',
Tmin = (300,'K'),
Tmax = (2000,'K'),
Tcount = 8,
Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
Pmin = (0.01,'bar'),
Pmax = (100,'bar'),
Pcount = 5,
Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
maximumGrainSize = (0.5,'kcal/mol'),
minimumGrainCount = 250,
method = 'modified strong collision',
interpolationModel = ('Chebyshev', 6, 4),
activeKRotor = True,
activeJRotor = True,
rmgmode = True,
)
| [
"[email protected]"
] | |
f7de7c2ef755e5893d687912c0b74ed7148a8d02 | 6b453d913b1ae6697da738ddae9df013e8128d91 | /app/members/urls.py | 1a117fa9f25eacf78656a78f5d5d3ef9928e11ec | [] | no_license | moorekwon/instagram | 9703ecb1aed460ddec685c0bd06fe0fac3807548 | aaeca79f0d2765a24dd780bb12848c2c7b76a009 | refs/heads/master | 2022-12-12T18:13:37.536048 | 2020-02-03T08:33:16 | 2020-02-03T08:33:16 | 229,711,643 | 0 | 0 | null | 2022-12-08T03:32:50 | 2019-12-23T08:39:38 | Jupyter Notebook | UTF-8 | Python | false | false | 267 | py | from django.urls import path
from . import views
app_name = 'members'
urlpatterns = [
path('login/', views.login_view, name='login'),
path('logout/', views.logout_view, name='logout-view'),
path('naver-login/', views.naver_login, name='naver-login')
]
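# With app_name = 'members', these routes reverse as namespaced names,
# e.g. reverse('members:login') in Python or {% url 'members:naver-login' %}
# in templates.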
| [
"[email protected]"
] | |
4b26d2288fe6cceaed839816ed06bdce9f6e52d8 | f0ae65bddedea44b1e66f3d235a901e62afae3f2 | /macgyyver_stuff/parse_input.py | 9de1a35ebd5d51857f29e74f682675a2370a7704 | [] | no_license | Brandon-Valley/my_movie_tools | d0a2ba91cda054c1b68709f1a9082028842e83a1 | 371742bfeaa0cfa2985ce06a6865f6ae09445029 | refs/heads/master | 2023-01-10T00:06:48.696103 | 2022-12-28T14:09:20 | 2022-12-28T14:09:20 | 221,057,903 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py |
import pyperclip
INPUT_PATH = 'input.txt'
def read_text_file(file_path):
with open(file_path, 'r', encoding='utf-8') as text_file: # can throw FileNotFoundError
result = tuple(l.rstrip() for l in text_file.readlines())
return result
raw_in = read_text_file(INPUT_PATH)
print(raw_in)
in_str = ''
for line in raw_in:
in_str += line
print(in_str)
s_raw_in = in_str.split('"')
print(s_raw_in)
e_l = []
for elm_num, elm in enumerate(s_raw_in):
if elm_num % 2 != 0:
e_l.append(elm)
print(e_l)
pyperclip.copy(str(e_l))
spam = pyperclip.paste()
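# Example (illustrative): for input text like 'x "alpha" y "beta"', splitting
# on '"' gives ['x ', 'alpha', ' y ', 'beta', ''], so the odd-index elements
# ['alpha', 'beta'] are the quoted substrings that end up on the clipboard.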
| [
"[email protected]"
] | |
dfd28d04bd20cdbae0bd324f5eaf1c036da10434 | 480e33f95eec2e471c563d4c0661784c92396368 | /RecoMuon/MuonIdentification/python/me0MuonConverter_cfi.py | 83b6632c7cca09219c2dcc76760b07daa2dbaeab | [
"Apache-2.0"
] | permissive | cms-nanoAOD/cmssw | 4d836e5b76ae5075c232de5e062d286e2026e8bd | 4eccb8a758b605875003124dd55ea58552b86af1 | refs/heads/master-cmsswmaster | 2021-01-23T21:19:52.295420 | 2020-08-27T08:01:20 | 2020-08-27T08:01:20 | 102,867,729 | 7 | 14 | Apache-2.0 | 2022-05-23T07:58:09 | 2017-09-08T14:03:57 | C++ | UTF-8 | Python | false | false | 98 | py | import FWCore.ParameterSet.Config as cms
me0MuonConverting = cms.EDProducer("ME0MuonConverter")
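# Sketch of typical use inside a CMSSW configuration (the `process` object is
# assumed here, not defined by this fragment):
#   process.me0MuonConverting = me0MuonConverting
#   process.p = cms.Path(process.me0MuonConverting)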
| [
"[email protected]"
] | |
724778ba9809a4764fe8fb9db59911050b386395 | 8329282a8fda056d705c1af6dbcd0de1ed7ca25e | /.history/textutiles/textutiles/views_20210522223732.py | 629f8ff15d006894a027b5cc2f0bacbc0ca08e6f | [] | no_license | ritikalohia/Django-beginners- | c069b16867407ef883bb00c6faf4f601921c118a | 829e28ab25201853de5c71a10ceff30496afea52 | refs/heads/main | 2023-05-04T03:34:29.082656 | 2021-05-22T17:38:21 | 2021-05-22T17:38:21 | 369,869,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,627 | py | #created
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
#params = {'name' : 'Ritika', 'place' : 'Mars'}
return render(request, 'index.html')
#return HttpResponse("Home")
def contact(request):
return render(request, 'contact.html')
def about(request):
return render(request, 'about_us.html')
def analyze(request):
    # get the text to analyze (query parameters arrive via GET)
    djtext = request.GET.get('text', 'default')
    # check checkbox values
    removepunc = request.GET.get('removepunc', 'off')
    fullcaps = request.GET.get('fullcaps', 'off')
    newlineremover = request.GET.get('newlineremover', 'off')
    spaceremover = request.GET.get('spaceremover', 'off')
    charcount = request.GET.get('charcount', 'off')
    purposes = []

    if removepunc == "on":
        punctuations = '''!()-[]{};:'"\,<>./?@#$%^&*_'''
        analyzed = ""
        for char in djtext:
            if char not in punctuations:
                analyzed = analyzed + char
        djtext = analyzed
        purposes.append('removed punctuations')

    if fullcaps == "on":
        analyzed = ""
        for char in djtext:
            analyzed = analyzed + char.upper()
        djtext = analyzed
        purposes.append('changed to UPPERCASE')

    if newlineremover == "on":
        analyzed = ""
        for char in djtext:
            if char != '\n' and char != "\r":
                analyzed = analyzed + char
        djtext = analyzed
        purposes.append('removed new lines')

    if spaceremover == "on":
        analyzed = ""
        for index, char in enumerate(djtext):
            # drop a space when the next character is also a space
            if char == " " and index + 1 < len(djtext) and djtext[index + 1] == " ":
                continue
            analyzed = analyzed + char
        djtext = analyzed
        purposes.append('removed extra spaces')

    if charcount == "on":
        purposes.append('counted %d characters' % len(djtext))

    if purposes:
        params = {'purpose': ', '.join(purposes), 'analyzed_text': djtext}
        return render(request, 'analyze.html', params)
    return HttpResponse("Error: select at least one operation")
# def capfirst(request):
# return HttpResponse("capitalize first") | [
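# Example request this view handles (the URL path is illustrative):
#   /analyze/?text=Hello,%20World!&removepunc=on&fullcaps=on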
"[email protected]"
] | |
20aa6256635712125cec7cde82d9edd48783816b | 58a686c0a752db0f9084659a7f93d5b4f35a065c | /web_idea/apps.py | 3ad06d0a73b08e922fed17edcaef47f32b83f3ed | [
"MIT"
] | permissive | Dimas4/Web-IDEA-Python-Django | 5322ae013eb9b6803c175886f152654ed9469402 | 9cf17150d422979470e3d865a381309a83d8e875 | refs/heads/master | 2020-04-10T04:45:33.843338 | 2018-12-07T11:58:34 | 2018-12-07T11:58:34 | 160,808,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90 | py | from django.apps import AppConfig
class WebIdeaConfig(AppConfig):
name = 'web_idea'
| [
"[email protected]"
] | |
c6a0f515e4061baa17af3a79b41c463f25758ff0 | 92429015d9a1f1cea9b9bf2c9f1a8a7a07586af5 | /option.py | 6e7b7892e37aa810a008bdf082451b08034a0125 | [] | no_license | arthur-qiu/adv_vis | 46a953ce6c3d562137c8e566bc9b523e25bc5bbd | ba46c00cf38ca5186d7db84844892036ed714eaf | refs/heads/master | 2021-01-03T23:00:45.065108 | 2020-04-05T03:47:01 | 2020-04-05T03:47:01 | 240,272,320 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,099 | py | import argparse
import os
class BaseOptions():
def __init__(self):
self.parser = argparse.ArgumentParser(description='Trains a CIFAR Classifier',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
self.initialized = False
def initialize(self):
self.parser.add_argument('--dataset', type=str, default='cifar10', choices=['cifar10', 'cifar100'],
help='Choose between CIFAR-10, CIFAR-100.')
self.parser.add_argument('--model', '-m', type=str, default='wrn',
choices=['wrn'], help='Choose architecture.')
# Optimization options
self.parser.add_argument('--epochs', '-e', type=int, default=50, help='Number of epochs to train.')
        self.parser.add_argument('--start_epoch', type=int, default=1, help='The epoch to start training from; useful when restarting.')
self.parser.add_argument('--learning_rate', '-lr', type=float, default=0.1, help='The initial learning rate.')
self.parser.add_argument('--batch_size', '-b', type=int, default=128, help='Batch size.')
self.parser.add_argument('--test_bs', type=int, default=128)
self.parser.add_argument('--momentum', type=float, default=0.9, help='Momentum.')
self.parser.add_argument('--decay', '-d', type=float, default=0.0005, help='Weight decay (L2 penalty).')
self.parser.add_argument('--epoch_step', default='[40,42,44,46,48]', type=str,
help='json list with epochs to drop lr on')
self.parser.add_argument('--lr_decay_ratio', default=0.2, type=float)
# Checkpoints
self.parser.add_argument('--save', '-s', type=str, default='./logs/cifar10_adv', help='Folder to save checkpoints.')
self.parser.add_argument('--load', '-l', type=str, default='', help='Checkpoint path to resume / test.')
self.parser.add_argument('--test', '-t', action='store_true', help='Test only flag.')
self.parser.add_argument('--dataroot', default='.', type=str)
# Acceleration
self.parser.add_argument('--ngpu', type=int, default=1, help='0 = CPU.')
self.parser.add_argument('--prefetch', type=int, default=1, help='Pre-fetching threads.')
# Adversarial setting
self.parser.add_argument('--epsilon', type=float, default=8 / 255,
help='perturbation')
self.parser.add_argument('--num_steps', type=int, default=7,
help='perturb number of steps')
self.parser.add_argument('--step_size', type=float, default=2 / 255,
help='perturb step size')
self.parser.add_argument('--test_num_steps', type=int, default=20,
help='test perturb number of steps')
self.parser.add_argument('--test_step_size', type=float, default=2 / 255,
help='test perturb step size')
# Others
self.parser.add_argument('--random_seed', type=int, default=1)
def parse(self, save=True):
if not self.initialized:
self.initialize()
self.opt = self.parser.parse_args()
args = vars(self.opt)
print('------------ Options -------------')
for k, v in sorted(args.items()):
print('%s: %s' % (str(k), str(v)))
print('-------------- End ----------------')
# save to the disk
# Make save directory
if not os.path.exists(self.opt.save):
os.makedirs(self.opt.save)
if not os.path.isdir(self.opt.save):
raise Exception('%s is not a dir' % self.opt.save)
if save and not self.opt.test:
file_name = os.path.join(self.opt.save, 'opt.txt')
with open(file_name, 'wt') as opt_file:
opt_file.write('------------ Options -------------\n')
for k, v in sorted(args.items()):
opt_file.write('%s: %s\n' % (str(k), str(v)))
opt_file.write('-------------- End ----------------\n')
return self.opt
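# Minimal usage sketch (hypothetical caller, not part of this module):
#   opt = BaseOptions().parse()
#   print(opt.dataset, opt.epsilon, opt.num_steps)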
| [
"Arthur"
] | Arthur |
5e6eab96a36af8362b1089b13514cebebf213f95 | 11812a0cc7b818292e601ecdd4aa4c4e03d131c5 | /100days_of_python/day32/main.py | 2d1a1c5e6332bb4dae8a588642e9e2d964c7be13 | [] | no_license | SunshineFaxixi/Python_Learning | f1e55adcfa898489cc9146ccfb220f0b48a31a22 | ab3ca44d013311b6de02124091acc4c36a83c4d9 | refs/heads/master | 2021-08-16T05:47:29.963118 | 2021-01-04T13:48:30 | 2021-01-04T13:48:30 | 238,857,341 | 1 | 0 | null | 2020-03-03T13:53:08 | 2020-02-07T06:21:46 | HTML | UTF-8 | Python | false | false | 1,364 | py | ##################### Extra Hard Starting Project ######################
import pandas
from datetime import datetime
import os
import random
import smtplib
MY_EMAIL = "[email protected]"
MY_PASSWORD = "TXHTVGKIOLEHXVCI"
today = datetime.now()
today_tuple = (today.month, today.day)
all_birth_info = pandas.read_csv("birthdays.csv")
birthday_dict = {(data_row["month"], data_row["day"]): data_row for (index, data_row) in all_birth_info.iterrows()}
# 2. Check if today matches a birthday in the birthdays.csv
if today_tuple in birthday_dict:
# 3. If step 2 is true, pick a random letter from letter templates and replace the [NAME] with the person's actual name from birthdays.csv
birthday_person = birthday_dict[today_tuple]
file_path = f"letter_templates/letter_{random.randint(1, 3)}.txt"
with open(file_path) as data:
content = data.read()
content = content.replace("[NAME]", birthday_person["name"])
# print(content)
# 4. Send the letter generated in step 3 to that person's email address.
with smtplib.SMTP("smtp.163.com") as connection:
connection.starttls()
connection.login(user=MY_EMAIL, password=MY_PASSWORD)
connection.sendmail(
from_addr=MY_EMAIL,
to_addrs=birthday_person["email"],
msg=f"Subject: Happy Birthday!\n\n{content}"
)
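# Expected birthdays.csv layout, inferred from the columns used above
# (the data row is illustrative):
#   name,email,month,day
#   Ada,[email protected],5,22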
| [
"[email protected]"
] | |
53e8ea169d0cfd5c2042f9ade08153f4669354fc | 65b4522c04c2be071c2d42095956fe950fe1cebe | /inversions/inversion10/iter2/run5/analysis/pred_disp/create_predicted_disp_database.py | 608cb3ba2bafea964917232a2b235b12007f7f0a | [] | no_license | geodesy/viscojapan | ac0cd93f7a2134cd2651623b94879dcc21c0c46a | 03e70265b56eb5994e73bcb6066f0be338e42f27 | refs/heads/master | 2021-03-03T18:19:07.779601 | 2015-07-16T03:50:49 | 2015-07-16T03:50:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 740 | py | import sqlite3
import numpy as np
import viscojapan as vj
pred = vj.inv.DispPred(
file_G0 = '../../../green_function/G0_He50km_VisM6.3E18_Rake83.h5',
result_file = '../../outs/nrough_05_naslip_11.h5',
fault_file = '../../../fault_model/fault_bott80km.h5',
files_Gs = ['../../../green_function/G1_He50km_VisM1.0E19_Rake83.h5',
'../../../green_function/G2_He60km_VisM6.3E18_Rake83.h5',
'../../../green_function/G3_He50km_VisM6.3E18_Rake90.h5'
],
nlin_par_names = ['log10(visM)','log10(He)','rake'],
file_incr_slip0 = '../../slip0/v1/slip0.h5',
)
writer = vj.inv.PredDispToDatabaseWriter(
pred_disp = pred
)
writer.create_database()
writer.insert_all()
| [
"[email protected]"
] | |
e254aa45d97a2f3ff329c8b06be41ad5a4e0aec5 | 3acb90a1e97a0e851c6e4b1b57dda78ec5e3e3b4 | /problems/deep_copy_graph.py | 0b0caab3f09a04fb8519c76e677dd80b5c5b183b | [] | no_license | jhyang12345/algorithm-problems | fea3c6498cff790fc4932404b5bbab08a6d4a627 | 704355013de9965ec596d2e0115fd2ca9828d0cb | refs/heads/master | 2023-05-15T10:26:52.685471 | 2021-06-01T14:57:38 | 2021-06-01T14:57:38 | 269,333,379 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,641 | py | # Given a node in a connected directional graph, create a copy of it.
#
# Here's an example and some starter code.
class Node:
def __init__(self, value, adj=None):
self.value = value
self.adj = adj
# Variable to help print graph
self._print_visited = set()
if self.adj is None:
self.adj = []
# Able to print graph
def __repr__(self):
if self in self._print_visited:
return ''
else:
self._print_visited.add(self)
final_str = ''
for n in self.adj:
final_str += f'{n}\n'
self._print_visited.remove(self)
return final_str + f'({self.value}, ({[n.value for n in self.adj]}))'
def deep_copy_graph(graph_node):
    # BFS over the original graph. dummy_map remembers the copy created for
    # each original node, so shared and cyclic references resolve to a single
    # copy instead of being duplicated (the previous version only marked nodes
    # visited when dequeued, so a node reachable from two frontier nodes could
    # be copied twice).
    dummy_map = {graph_node: Node(graph_node.value)}
    queue = [graph_node]
    while queue:
        cur = queue.pop(0)
        copy = dummy_map[cur]
        for node in cur.adj:
            if node not in dummy_map:
                # first visit: create the node's copy and schedule its edges
                dummy_map[node] = Node(node.value)
                queue.append(node)
            copy.adj.append(dummy_map[node])
    return dummy_map[graph_node]
n5 = Node(5)
n4 = Node(4)
n3 = Node(3, [n4])
n2 = Node(2)
n1 = Node(1, [n5])
n5.adj = [n3]
n4.adj = [n3, n2]
n2.adj = [n4]
graph_copy = deep_copy_graph(n1)
print(graph_copy)
# (2, ([4]))
# (4, ([3, 2]))
# (3, ([4]))
# (5, ([3]))
# (1, ([5]))
| [
"[email protected]"
] | |
55bb3d82b80185533da7d4c85f2c8e6589933ab4 | cd4eb25911d3e3b092aa97aaa7b8fbba6c3a0704 | /lang/python/asyncio/chain.py | aa8d41fb55cb2b5319e1e90b9a6e8a96e55ad630 | [
"MIT"
] | permissive | liuyang1/test | 29bb142982d2ef0d79b71e8fe5f5e0d51ec5258e | 9a154e0161a1a33baad53f7223ee72e702532001 | refs/heads/master | 2023-08-05T08:56:50.526414 | 2023-07-21T05:49:53 | 2023-07-21T11:16:09 | 26,949,326 | 9 | 1 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | import asyncio
@asyncio.coroutine
def compute(x, y):
print("Compute %s + %s ..." % (x, y))
# yield from asyncio.sleep(1.0)
return x + y
@asyncio.coroutine
def print_sum(x, y):
result = yield from compute(x, y)
print("%s + %s = %s" % (x, y, result))
loop = asyncio.get_event_loop()
loop.run_until_complete(print_sum(1, 2))
loop.run_until_complete(print_sum(3, 2))
loop.close()
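# Note: @asyncio.coroutine / "yield from" is the legacy pre-3.5 coroutine
# style (removed in Python 3.11). A modern equivalent, as a sketch:
#   async def compute(x, y):
#       return x + y
#   async def print_sum(x, y):
#       result = await compute(x, y)
#       print("%s + %s = %s" % (x, y, result))
#   asyncio.run(print_sum(1, 2))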
| [
"[email protected]"
] | |
6000dedcf91921ea9a5a6cba05ff8fe17f2ae918 | 221d1ad342677d2fac8aa3f8d5c60e059a6316c9 | /pm4py/objects/log/util/dataframe_utils.py | e8318a1daaeaa367f7ae496fe27ab3a705aca2da | [] | no_license | niklasadams/explainable_concept_drift_pm | 06ff651fbdebece4adf96f94bfb4d1026da14c48 | 6bf84d727ab0bae76716a04ad28c7de73250c89d | refs/heads/main | 2023-08-26T18:21:49.955080 | 2021-10-29T18:53:48 | 2021-10-29T18:53:48 | 314,514,571 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,643 | py | from pm4py.util import constants
from pm4py.objects.log.log import EventStream
from pm4py.objects.conversion.log import converter as log_converter
import pandas as pd
from pm4py.util.vers_checker import check_pandas_ge_024
from enum import Enum
from pm4py.util import exec_utils
from pm4py.util import points_subset
from pm4py.util import xes_constants
LEGACY_PARQUET_TP_REPLACER = "AAA"
LEGACY_PARQUET_CASECONCEPTNAME = "caseAAAconceptAAAname"
class Parameters(Enum):
PARTITION_COLUMN = "partition_column"
CASE_ID_KEY = constants.PARAMETER_CONSTANT_CASEID_KEY
MANDATORY_ATTRIBUTES = "mandatory_attributes"
MAX_NO_CASES = "max_no_cases"
MIN_DIFFERENT_OCC_STR_ATTR = 5
MAX_DIFFERENT_OCC_STR_ATTR = 50
def insert_partitioning(df, num_partitions, parameters=None):
"""
Insert the partitioning in the specified dataframe
Parameters
-------------
df
Dataframe
num_partitions
Number of partitions
parameters
Parameters of the algorithm
Returns
-------------
df
Partitioned dataframe
"""
if parameters is None:
parameters = {}
case_id_key = exec_utils.get_param_value(Parameters.CASE_ID_KEY, parameters, constants.CASE_CONCEPT_NAME)
partition_column = exec_utils.get_param_value(Parameters.PARTITION_COLUMN, parameters, "@@partitioning")
df[partition_column] = df[case_id_key].rank(method='dense', ascending=False).astype(int) % num_partitions
return df
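# Example (sketch): insert_partitioning(df, 4) adds an integer column with
# values in {0, 1, 2, 3}, derived from the dense rank of each case id, so all
# events of a case land in the same partition.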
def legacy_parquet_support(df, parameters=None):
"""
For legacy support, Parquet files columns could not contain
a ":" that has been arbitrarily replaced by a replacer string.
This string substitutes the replacer to the :
Parameters
---------------
dataframe
Dataframe
parameters
Parameters of the algorithm
"""
if parameters is None:
parameters = {}
df.columns = [x.replace(LEGACY_PARQUET_TP_REPLACER, ":") for x in df.columns]
return df
def table_to_stream(table, parameters=None):
"""
Converts a Pyarrow table to an event stream
Parameters
------------
table
Pyarrow table
parameters
Possible parameters of the algorithm
"""
if parameters is None:
parameters = {}
dict0 = table.to_pydict()
keys = list(dict0.keys())
# for legacy format support
if LEGACY_PARQUET_CASECONCEPTNAME in keys:
for key in keys:
dict0[key.replace(LEGACY_PARQUET_TP_REPLACER, ":")] = dict0.pop(key)
stream = EventStream([dict(zip(dict0, i)) for i in zip(*dict0.values())])
return stream
def table_to_log(table, parameters=None):
"""
Converts a Pyarrow table to an event log
Parameters
------------
table
Pyarrow table
parameters
Possible parameters of the algorithm
"""
if parameters is None:
parameters = {}
stream = table_to_stream(table, parameters=parameters)
return log_converter.apply(stream, parameters=parameters)
def convert_timestamp_columns_in_df(df, timest_format=None, timest_columns=None):
"""
Convert all dataframe columns in a dataframe
Parameters
-----------
df
Dataframe
timest_format
(If provided) Format of the timestamp columns in the CSV file
timest_columns
Columns of the CSV that shall be converted into timestamp
Returns
------------
df
Dataframe with timestamp columns converted
"""
needs_conversion = check_pandas_ge_024()
for col in df.columns:
if timest_columns is None or col in timest_columns:
if df[col].dtype == 'object':
try:
if timest_format is None:
if needs_conversion:
df[col] = pd.to_datetime(df[col], utc=True)
else:
df[col] = pd.to_datetime(df[col])
else:
if needs_conversion:
df[col] = pd.to_datetime(df[col], utc=True, format=timest_format)
else:
                            df[col] = pd.to_datetime(df[col], format=timest_format)
except ValueError:
# print("exception converting column: "+str(col))
pass
return df
def sample_dataframe(df, parameters=None):
"""
Sample a dataframe on a given number of cases
Parameters
--------------
df
Dataframe
parameters
Parameters of the algorithm, including:
- Parameters.CASE_ID_KEY
        - Parameters.MAX_NO_CASES
Returns
-------------
sampled_df
Sampled dataframe
"""
if parameters is None:
parameters = {}
case_id_key = exec_utils.get_param_value(Parameters.CASE_ID_KEY, parameters, constants.CASE_CONCEPT_NAME)
max_no_cases = exec_utils.get_param_value(Parameters.MAX_NO_CASES, parameters, 100)
case_ids = list(df[case_id_key].unique())
case_id_to_retain = points_subset.pick_chosen_points_list(min(max_no_cases, len(case_ids)), case_ids)
return df[df[case_id_key].isin(case_id_to_retain)]
def automatic_feature_selection_df(df, parameters=None):
"""
Performs an automatic feature selection on dataframes,
keeping the features useful for ML purposes
Parameters
---------------
df
Dataframe
parameters
Parameters of the algorithm
Returns
---------------
featured_df
Dataframe with only the features that have been selected
"""
if parameters is None:
parameters = {}
case_id_key = exec_utils.get_param_value(Parameters.CASE_ID_KEY, parameters, constants.CASE_CONCEPT_NAME)
mandatory_attributes = exec_utils.get_param_value(Parameters.MANDATORY_ATTRIBUTES, parameters,
set(df.columns).intersection(
{constants.CASE_CONCEPT_NAME, xes_constants.DEFAULT_NAME_KEY,
xes_constants.DEFAULT_TIMESTAMP_KEY}))
min_different_occ_str_attr = exec_utils.get_param_value(Parameters.MIN_DIFFERENT_OCC_STR_ATTR, parameters, 5)
max_different_occ_str_attr = exec_utils.get_param_value(Parameters.MAX_DIFFERENT_OCC_STR_ATTR, parameters, 50)
cols_dtypes = {x: str(df[x].dtype) for x in df.columns}
other_attributes_to_retain = set()
no_all_cases = df[case_id_key].nunique()
for x, y in cols_dtypes.items():
attr_df = df.dropna(subset=[x])
this_cases = attr_df[case_id_key].nunique()
        # in any case, keep only attributes that appear at least once per case
if this_cases == no_all_cases:
if "float" in y or "int" in y:
# (as in the classic log version) retain always float/int attributes
other_attributes_to_retain.add(x)
elif "object" in y:
# (as in the classic log version) keep string attributes if they have enough variability, but not too much
# (that would be hard to explain)
unique_val_count = df[x].nunique()
if min_different_occ_str_attr <= unique_val_count <= max_different_occ_str_attr:
other_attributes_to_retain.add(x)
else:
# not consider the attribute after this feature selection if it has other types (for example, date)
pass
attributes_to_retain = mandatory_attributes.union(other_attributes_to_retain)
return df[attributes_to_retain]
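# Illustrative end-to-end sketch (the CSV path and timestamp column below are
# assumptions, not part of this module):
#   import pandas as pd
#   df = pd.read_csv("event_log.csv")
#   df = convert_timestamp_columns_in_df(df, timest_columns=["time:timestamp"])
#   df = sample_dataframe(df, parameters={Parameters.MAX_NO_CASES: 50})
#   df = automatic_feature_selection_df(df)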
| [
"[email protected]"
] | |
40c34bd9c99fb2039166995b23dec6a86c82f436 | b02c88bcad352811d22cadacd2aa573c426a2ca3 | /scrapers/settings.py | 06a6ae3c2620e357e3388b52d97f309471c0bf4b | [
"Apache-2.0"
] | permissive | frankier/ties445 | 9e24c3e415b4a07584c41e0e4a3f241b133463b8 | 4ceee5390d81aedc0fb3904803797584dd5084b8 | refs/heads/master | 2020-12-31T07:18:42.938988 | 2016-05-17T18:45:14 | 2016-05-17T18:45:14 | 56,767,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,085 | py | # -*- coding: utf-8 -*-
# Scrapy settings for scrapers project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'scrapers'
SPIDER_MODULES = ['scrapers.spiders']
NEWSPIDER_MODULE = 'scrapers.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'scrapers (+http://www.yourdomain.com)'
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS=32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY=3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN=16
#CONCURRENT_REQUESTS_PER_IP=16
# Disable cookies (enabled by default)
#COOKIES_ENABLED=False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED=False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'scrapers.middlewares.MyCustomSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
DOWNLOADER_MIDDLEWARES = {
'scrapy.downloadermiddlewares.decompression.DecompressionMiddleware': 1,
'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': 1,
}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'scrapers.pipelines.SomePipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
# NOTE: AutoThrottle will honour the standard settings for concurrency and delay
#AUTOTHROTTLE_ENABLED=True
# The initial download delay
#AUTOTHROTTLE_START_DELAY=5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY=60
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG=False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED=True
#HTTPCACHE_EXPIRATION_SECS=0
#HTTPCACHE_DIR='httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES=[]
#HTTPCACHE_STORAGE='scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"[email protected]"
] | |
8dc513728068e9e929518340bb44a7718efc33eb | 3122ac39f1ce0a882b48293a77195476299c2a3b | /clients/python/generated/swaggyjenkins/models/extension_class_container_impl1links.py | 71b82d350f0fa80dde4f8cb80c78f8b7b2e3c3c7 | [
"MIT"
] | permissive | miao1007/swaggy-jenkins | 4e6fe28470eda2428cbc584dcd365a21caa606ef | af79438c120dd47702b50d51c42548b4db7fd109 | refs/heads/master | 2020-08-30T16:50:27.474383 | 2019-04-10T13:47:17 | 2019-04-10T13:47:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,766 | py | # coding: utf-8
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
OpenAPI spec version: 1.1.1
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class ExtensionClassContainerImpl1links(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'_self': 'Link',
'_class': 'str'
}
attribute_map = {
'_self': 'self',
'_class': '_class'
}
def __init__(self, _self=None, _class=None): # noqa: E501
"""ExtensionClassContainerImpl1links - a model defined in OpenAPI""" # noqa: E501
self.__self = None
self.__class = None
self.discriminator = None
if _self is not None:
self._self = _self
if _class is not None:
self._class = _class
@property
def _self(self):
"""Gets the _self of this ExtensionClassContainerImpl1links. # noqa: E501
:return: The _self of this ExtensionClassContainerImpl1links. # noqa: E501
:rtype: Link
"""
return self.__self
@_self.setter
def _self(self, _self):
"""Sets the _self of this ExtensionClassContainerImpl1links.
:param _self: The _self of this ExtensionClassContainerImpl1links. # noqa: E501
:type: Link
"""
self.__self = _self
@property
def _class(self):
"""Gets the _class of this ExtensionClassContainerImpl1links. # noqa: E501
:return: The _class of this ExtensionClassContainerImpl1links. # noqa: E501
:rtype: str
"""
return self.__class
@_class.setter
def _class(self, _class):
"""Sets the _class of this ExtensionClassContainerImpl1links.
:param _class: The _class of this ExtensionClassContainerImpl1links. # noqa: E501
:type: str
"""
self.__class = _class
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ExtensionClassContainerImpl1links):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
e3e0ff71c09f66324bba160b6a4edccc40d93fff | ddc5aa77203bf76cd789c173dffbc382ed8ef004 | /test/app_test/master.py | f1fe1995de473cf239f7fc143c31029ce2d5bca1 | [] | no_license | phroiland/FinBiotic | 0b8183ce9f97c3fc4b1f7e20decc3472bffe8800 | a30ef2e979b230e5424fd25ef7dd1fb49bbd5245 | refs/heads/master | 2023-08-18T15:26:15.948262 | 2023-08-15T15:13:23 | 2023-08-15T15:13:23 | 93,895,989 | 2 | 2 | null | 2023-03-01T20:08:37 | 2017-06-09T20:52:02 | Python | UTF-8 | Python | false | false | 4,105 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon May 29 13:30:38 2017
@author: jonfroiland
"""
import sys
import argparse
import oandapyV20
import oandapyV20.endpoints.positions as openPos
# Data, Price, and Strategy Imports
import settings
import common.config
import common.args
from stream.streamingData import StreamingData
from stream.view import mid_string, heartbeat_to_string, instrument_string
from account.balance import Balance
from strategy.breakout import Breakout
from strategy.spreads import Spreads
from strategy.strategy import Strategy
from pivots.pivotImports import PivotImports
# from view import bid_string, ask_string, price_to_string
from datetime import datetime
import pandas as pd
pd.set_option('display.large_repr', 'truncate')
pd.set_option('display.max_columns', 0)
def main():
print "------ System online -------", datetime.now()
parser = argparse.ArgumentParser()
common.config.add_argument(parser)
parser.add_argument('--instrument', "-i", type=common.args.instrument,
required=True, action="append",
help="Instrument to get prices for")
parser.add_argument('--snapshot', action="store_true", default=True,
help="Request an initial snapshot")
parser.add_argument('--no-snapshot', dest="snapshot", action="store_false",
help="Do not request an initial snapshot")
parser.add_argument('--show-heartbeats', "-s", action='store_true',
default=False, help="display heartbeats")
args = parser.parse_args()
# print sys.argv[2]
account_id = args.config.active_account
api = args.config.create_streaming_context()
account_api = args.config.create_context()
response = api.pricing.stream(account_id, snapshot=args.snapshot,
instruments=",".join(args.instrument))
dfD = PivotImports(sys.argv[2]).daily()
# dfW = p.weekly()
balance = Balance(account_api, account_id).balance()
df = pd.DataFrame([])
for msg_type, msg in response.parts():
if msg_type == "pricing.Heartbeat" and args.show_heartbeats:
print heartbeat_to_string(msg)
if msg_type == "pricing.Price":
sd = StreamingData(datetime.now(), instrument_string(msg),
mid_string(msg), account_api, account_id, 's',
'5min', balance)
df = df.append(sd.df())
sd.resample(df)
print "df:", df.shape[0], "minuteData:", sd.minuteData().shape[0]
# print sd.minuteData(),'\n'
if sd.minuteData().shape[0] < 20:
continue
else:
client = oandapyV20.API(settings.ACCESS_TOKEN)
r = openPos.OpenPositions(accountID=account_id)
client.request(r)
openTrades = []
for i in r.response['positions']:
trades = i['instrument']
openTrades.append(trades)
print 'Open Trades', openTrades
if instrument_string(msg) in openTrades:
continue
else:
try:
b = Breakout(sd.minuteData())
breakout = b.breakout()
# print 'Breakout Units:',breakout
s = Spreads(dfD, mid_string(msg))
pivot, rl1, rl2, rl3, sl1, sl2, sl3 = s.spreads()
rate1, rate2 = s.spreads_out()
strat = Strategy(account_api, account_id,
instrument_string(msg), dfD,
mid_string(msg), breakout, pivot, rl1,
rl2, rl3, sl1, sl2, sl3, rate1, rate2)
strat.res_check()
strat.sup_check()
except Exception as e:
print e
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
8d06548df5f6398354e80696bdcd4de55ab84d3a | f44e4485385296f4d1de2032c64c76de37ec5007 | /pyatv/mrp/protobuf/DeviceInfoMessage_pb2.py | f18237f3e98c6570af7c0facc2de477cda9de067 | [
"MIT"
] | permissive | kdschlosser/pyatv | 370d0a35e39623b8e8e6a087c675ec47aa50fb16 | fa32dab9ad3c4adffdc944ed78427f6c724074f5 | refs/heads/master | 2022-06-20T06:58:13.608441 | 2020-05-11T04:57:55 | 2020-05-11T06:22:23 | 264,143,600 | 1 | 0 | MIT | 2020-05-15T08:48:06 | 2020-05-15T08:48:05 | null | UTF-8 | Python | false | true | 17,207 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pyatv/mrp/protobuf/DeviceInfoMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from pyatv.mrp.protobuf import ProtocolMessage_pb2 as pyatv_dot_mrp_dot_protobuf_dot_ProtocolMessage__pb2
from pyatv.mrp.protobuf import Common_pb2 as pyatv_dot_mrp_dot_protobuf_dot_Common__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='pyatv/mrp/protobuf/DeviceInfoMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n*pyatv/mrp/protobuf/DeviceInfoMessage.proto\x1a(pyatv/mrp/protobuf/ProtocolMessage.proto\x1a\x1fpyatv/mrp/protobuf/Common.proto\"\x98\x07\n\x11\x44\x65viceInfoMessage\x12\x18\n\x10uniqueIdentifier\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12\x1a\n\x12localizedModelName\x18\x03 \x01(\t\x12\x1a\n\x12systemBuildVersion\x18\x04 \x02(\t\x12#\n\x1b\x61pplicationBundleIdentifier\x18\x05 \x02(\t\x12 \n\x18\x61pplicationBundleVersion\x18\x06 \x01(\t\x12\x17\n\x0fprotocolVersion\x18\x07 \x02(\x05\x12 \n\x18lastSupportedMessageType\x18\x08 \x01(\r\x12\x1d\n\x15supportsSystemPairing\x18\t \x01(\x08\x12\x15\n\rallowsPairing\x18\n \x01(\x08\x12\x11\n\tconnected\x18\x0b \x01(\x08\x12\x1e\n\x16systemMediaApplication\x18\x0c \x01(\t\x12\x13\n\x0bsupportsACL\x18\r \x01(\x08\x12\x1b\n\x13supportsSharedQueue\x18\x0e \x01(\x08\x12\x1e\n\x16supportsExtendedMotion\x18\x0f \x01(\x08\x12\x18\n\x10\x62luetoothAddress\x18\x10 \x01(\x0c\x12\x1a\n\x12sharedQueueVersion\x18\x11 \x01(\r\x12\x11\n\tdeviceUID\x18\x13 \x01(\t\x12\x1d\n\x15managedConfigDeviceID\x18\x14 \x01(\t\x12&\n\x0b\x64\x65viceClass\x18\x15 \x01(\x0e\x32\x11.DeviceClass.Enum\x12\x1a\n\x12logicalDeviceCount\x18\x16 \x01(\r\x12\x1a\n\x12tightlySyncedGroup\x18\x17 \x01(\x08\x12\x1a\n\x12isProxyGroupPlayer\x18\x18 \x01(\x08\x12\x14\n\x0ctightSyncUID\x18\x19 \x01(\t\x12\x10\n\x08groupUID\x18\x1a \x01(\t\x12\x11\n\tgroupName\x18\x1b \x01(\t\x12*\n\x0egroupedDevices\x18\x1c \x03(\x0b\x32\x12.DeviceInfoMessage\x12\x15\n\risGroupLeader\x18\x1d \x01(\x08\x12\x17\n\x0fisAirplayActive\x18\x1e \x01(\x08\x12 \n\x18systemPodcastApplication\x18\x1f \x01(\t\x12\x1c\n\x14\x65nderDefaultGroupUID\x18 \x01(\t\x12\x18\n\x10\x61irplayReceivers\x18! \x03(\t\x12\x11\n\tlinkAgent\x18\" \x01(\t:?\n\x11\x64\x65viceInfoMessage\x12\x10.ProtocolMessage\x18\x14 \x01(\x0b\x32\x12.DeviceInfoMessage'
,
dependencies=[pyatv_dot_mrp_dot_protobuf_dot_ProtocolMessage__pb2.DESCRIPTOR,pyatv_dot_mrp_dot_protobuf_dot_Common__pb2.DESCRIPTOR,])
DEVICEINFOMESSAGE_FIELD_NUMBER = 20
deviceInfoMessage = _descriptor.FieldDescriptor(
name='deviceInfoMessage', full_name='deviceInfoMessage', index=0,
number=20, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
_DEVICEINFOMESSAGE = _descriptor.Descriptor(
name='DeviceInfoMessage',
full_name='DeviceInfoMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='uniqueIdentifier', full_name='DeviceInfoMessage.uniqueIdentifier', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='DeviceInfoMessage.name', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='localizedModelName', full_name='DeviceInfoMessage.localizedModelName', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='systemBuildVersion', full_name='DeviceInfoMessage.systemBuildVersion', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='applicationBundleIdentifier', full_name='DeviceInfoMessage.applicationBundleIdentifier', index=4,
number=5, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='applicationBundleVersion', full_name='DeviceInfoMessage.applicationBundleVersion', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='protocolVersion', full_name='DeviceInfoMessage.protocolVersion', index=6,
number=7, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lastSupportedMessageType', full_name='DeviceInfoMessage.lastSupportedMessageType', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supportsSystemPairing', full_name='DeviceInfoMessage.supportsSystemPairing', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='allowsPairing', full_name='DeviceInfoMessage.allowsPairing', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='connected', full_name='DeviceInfoMessage.connected', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='systemMediaApplication', full_name='DeviceInfoMessage.systemMediaApplication', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supportsACL', full_name='DeviceInfoMessage.supportsACL', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supportsSharedQueue', full_name='DeviceInfoMessage.supportsSharedQueue', index=13,
number=14, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supportsExtendedMotion', full_name='DeviceInfoMessage.supportsExtendedMotion', index=14,
number=15, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bluetoothAddress', full_name='DeviceInfoMessage.bluetoothAddress', index=15,
number=16, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sharedQueueVersion', full_name='DeviceInfoMessage.sharedQueueVersion', index=16,
number=17, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deviceUID', full_name='DeviceInfoMessage.deviceUID', index=17,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='managedConfigDeviceID', full_name='DeviceInfoMessage.managedConfigDeviceID', index=18,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deviceClass', full_name='DeviceInfoMessage.deviceClass', index=19,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='logicalDeviceCount', full_name='DeviceInfoMessage.logicalDeviceCount', index=20,
number=22, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tightlySyncedGroup', full_name='DeviceInfoMessage.tightlySyncedGroup', index=21,
number=23, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='isProxyGroupPlayer', full_name='DeviceInfoMessage.isProxyGroupPlayer', index=22,
number=24, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tightSyncUID', full_name='DeviceInfoMessage.tightSyncUID', index=23,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='groupUID', full_name='DeviceInfoMessage.groupUID', index=24,
number=26, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='groupName', full_name='DeviceInfoMessage.groupName', index=25,
number=27, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='groupedDevices', full_name='DeviceInfoMessage.groupedDevices', index=26,
number=28, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='isGroupLeader', full_name='DeviceInfoMessage.isGroupLeader', index=27,
number=29, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='isAirplayActive', full_name='DeviceInfoMessage.isAirplayActive', index=28,
number=30, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='systemPodcastApplication', full_name='DeviceInfoMessage.systemPodcastApplication', index=29,
number=31, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='enderDefaultGroupUID', full_name='DeviceInfoMessage.enderDefaultGroupUID', index=30,
number=32, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='airplayReceivers', full_name='DeviceInfoMessage.airplayReceivers', index=31,
number=33, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='linkAgent', full_name='DeviceInfoMessage.linkAgent', index=32,
number=34, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=122,
serialized_end=1042,
)
_DEVICEINFOMESSAGE.fields_by_name['deviceClass'].enum_type = pyatv_dot_mrp_dot_protobuf_dot_Common__pb2._DEVICECLASS_ENUM
_DEVICEINFOMESSAGE.fields_by_name['groupedDevices'].message_type = _DEVICEINFOMESSAGE
DESCRIPTOR.message_types_by_name['DeviceInfoMessage'] = _DEVICEINFOMESSAGE
DESCRIPTOR.extensions_by_name['deviceInfoMessage'] = deviceInfoMessage
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DeviceInfoMessage = _reflection.GeneratedProtocolMessageType('DeviceInfoMessage', (_message.Message,), {
'DESCRIPTOR' : _DEVICEINFOMESSAGE,
'__module__' : 'pyatv.mrp.protobuf.DeviceInfoMessage_pb2'
# @@protoc_insertion_point(class_scope:DeviceInfoMessage)
})
_sym_db.RegisterMessage(DeviceInfoMessage)
deviceInfoMessage.message_type = _DEVICEINFOMESSAGE
pyatv_dot_mrp_dot_protobuf_dot_ProtocolMessage__pb2.ProtocolMessage.RegisterExtension(deviceInfoMessage)
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
4449a9ba1f7077329a5da7221fd2c951aa9a4573 | ebcea394905df8222c257c8c6c469627a6e48095 | /PyQt5/object_detection/inputs_test.py | cc79131e3a02e54893093a7c803e84b4cb10687c | [] | no_license | valiok98/Python-Qt5-Tensorflow | 2773cfc2a0e569ed53cf3d90066885f17abe8c6a | e03ccc2884b687a36fbe47f5ff320837be3e217a | refs/heads/master | 2021-09-17T20:41:01.908602 | 2018-03-31T12:42:25 | 2018-03-31T12:42:25 | 103,644,683 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,083 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.tflearn.inputs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
import numpy as np
import tensorflow as tf
import sys
sys.path.append("..")
import inputs
from core import preprocessor
from core import standard_fields as fields
from utils import config_util
FLAGS = tf.flags.FLAGS
def _get_configs_for_model(model_name):
"""Returns configurations for model."""
# TODO: Make sure these tests work fine outside google3.
fname = os.path.join(
FLAGS.test_srcdir,
('google3/third_party/tensorflow_models/'
'object_detection/samples/configs/' + model_name + '.config'))
label_map_path = os.path.join(FLAGS.test_srcdir,
('google3/third_party/tensorflow_models/'
'object_detection/data/pet_label_map.pbtxt'))
data_path = os.path.join(FLAGS.test_srcdir,
('google3/third_party/tensorflow_models/'
'object_detection/test_data/pets_examples.record'))
configs = config_util.get_configs_from_pipeline_file(fname)
return config_util.merge_external_params_with_configs(
configs,
train_input_path=data_path,
eval_input_path=data_path,
label_map_path=label_map_path)
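# The merged dict returned above is keyed by config section; the tests below
# rely on at least 'model', 'train_config', 'eval_config',
# 'train_input_config' and 'eval_input_config'. A minimal usage sketch
# (numbers mirror the pets setup used throughout this file):
#
#   configs = _get_configs_for_model('ssd_inception_v2_pets')
#   configs['model'].ssd.num_classes = 37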
class InputsTest(tf.test.TestCase):
def test_faster_rcnn_resnet50_train_input(self):
"""Tests the training input function for FasterRcnnResnet50."""
configs = _get_configs_for_model('faster_rcnn_resnet50_pets')
configs['train_config'].unpad_groundtruth_tensors = True
model_config = configs['model']
model_config.faster_rcnn.num_classes = 37
train_input_fn = inputs.create_train_input_fn(
configs['train_config'], configs['train_input_config'], model_config)
features, labels = train_input_fn()
self.assertAllEqual([None, None, 3],
features[fields.InputDataFields.image].shape.as_list())
self.assertEqual(tf.float32, features[fields.InputDataFields.image].dtype)
self.assertAllEqual([],
features[inputs.HASH_KEY].shape.as_list())
self.assertEqual(tf.int32, features[inputs.HASH_KEY].dtype)
self.assertAllEqual(
[None, 4],
labels[fields.InputDataFields.groundtruth_boxes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_boxes].dtype)
self.assertAllEqual(
[None, model_config.faster_rcnn.num_classes],
labels[fields.InputDataFields.groundtruth_classes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_classes].dtype)
self.assertAllEqual(
[None],
labels[fields.InputDataFields.groundtruth_weights].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_weights].dtype)
def test_faster_rcnn_resnet50_eval_input(self):
"""Tests the eval input function for FasterRcnnResnet50."""
configs = _get_configs_for_model('faster_rcnn_resnet50_pets')
model_config = configs['model']
model_config.faster_rcnn.num_classes = 37
eval_input_fn = inputs.create_eval_input_fn(
configs['eval_config'], configs['eval_input_config'], model_config)
features, labels = eval_input_fn()
self.assertAllEqual([1, None, None, 3],
features[fields.InputDataFields.image].shape.as_list())
self.assertEqual(tf.float32, features[fields.InputDataFields.image].dtype)
self.assertAllEqual(
[1, None, None, 3],
features[fields.InputDataFields.original_image].shape.as_list())
self.assertEqual(tf.uint8,
features[fields.InputDataFields.original_image].dtype)
self.assertAllEqual([1], features[inputs.HASH_KEY].shape.as_list())
self.assertEqual(tf.int32, features[inputs.HASH_KEY].dtype)
self.assertAllEqual(
[1, None, 4],
labels[fields.InputDataFields.groundtruth_boxes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_boxes].dtype)
self.assertAllEqual(
[1, None, model_config.faster_rcnn.num_classes],
labels[fields.InputDataFields.groundtruth_classes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_classes].dtype)
self.assertAllEqual(
[1, None],
labels[fields.InputDataFields.groundtruth_area].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_area].dtype)
self.assertAllEqual(
[1, None],
labels[fields.InputDataFields.groundtruth_is_crowd].shape.as_list())
self.assertEqual(
tf.bool, labels[fields.InputDataFields.groundtruth_is_crowd].dtype)
self.assertAllEqual(
[1, None],
labels[fields.InputDataFields.groundtruth_difficult].shape.as_list())
self.assertEqual(
tf.int32, labels[fields.InputDataFields.groundtruth_difficult].dtype)
def test_ssd_inceptionV2_train_input(self):
"""Tests the training input function for SSDInceptionV2."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
model_config = configs['model']
model_config.ssd.num_classes = 37
batch_size = configs['train_config'].batch_size
train_input_fn = inputs.create_train_input_fn(
configs['train_config'], configs['train_input_config'], model_config)
features, labels = train_input_fn()
self.assertAllEqual([batch_size, 300, 300, 3],
features[fields.InputDataFields.image].shape.as_list())
self.assertEqual(tf.float32, features[fields.InputDataFields.image].dtype)
self.assertAllEqual([batch_size],
features[inputs.HASH_KEY].shape.as_list())
self.assertEqual(tf.int32, features[inputs.HASH_KEY].dtype)
self.assertAllEqual(
[batch_size],
labels[fields.InputDataFields.num_groundtruth_boxes].shape.as_list())
self.assertEqual(tf.int32,
labels[fields.InputDataFields.num_groundtruth_boxes].dtype)
self.assertAllEqual(
[batch_size, 50, 4],
labels[fields.InputDataFields.groundtruth_boxes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_boxes].dtype)
self.assertAllEqual(
[batch_size, 50, model_config.ssd.num_classes],
labels[fields.InputDataFields.groundtruth_classes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_classes].dtype)
self.assertAllEqual(
[batch_size, 50],
labels[fields.InputDataFields.groundtruth_weights].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_weights].dtype)
def test_ssd_inceptionV2_eval_input(self):
"""Tests the eval input function for SSDInceptionV2."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
model_config = configs['model']
model_config.ssd.num_classes = 37
eval_input_fn = inputs.create_eval_input_fn(
configs['eval_config'], configs['eval_input_config'], model_config)
features, labels = eval_input_fn()
self.assertAllEqual([1, 300, 300, 3],
features[fields.InputDataFields.image].shape.as_list())
self.assertEqual(tf.float32, features[fields.InputDataFields.image].dtype)
self.assertAllEqual(
[1, None, None, 3],
features[fields.InputDataFields.original_image].shape.as_list())
self.assertEqual(tf.uint8,
features[fields.InputDataFields.original_image].dtype)
self.assertAllEqual([1], features[inputs.HASH_KEY].shape.as_list())
self.assertEqual(tf.int32, features[inputs.HASH_KEY].dtype)
self.assertAllEqual(
[1, None, 4],
labels[fields.InputDataFields.groundtruth_boxes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_boxes].dtype)
self.assertAllEqual(
[1, None, model_config.ssd.num_classes],
labels[fields.InputDataFields.groundtruth_classes].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_classes].dtype)
self.assertAllEqual(
[1, None],
labels[fields.InputDataFields.groundtruth_area].shape.as_list())
self.assertEqual(tf.float32,
labels[fields.InputDataFields.groundtruth_area].dtype)
self.assertAllEqual(
[1, None],
labels[fields.InputDataFields.groundtruth_is_crowd].shape.as_list())
self.assertEqual(
tf.bool, labels[fields.InputDataFields.groundtruth_is_crowd].dtype)
self.assertAllEqual(
[1, None],
labels[fields.InputDataFields.groundtruth_difficult].shape.as_list())
self.assertEqual(
tf.int32, labels[fields.InputDataFields.groundtruth_difficult].dtype)
def test_predict_input(self):
"""Tests the predict input function."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
predict_input_fn = inputs.create_predict_input_fn(
model_config=configs['model'])
serving_input_receiver = predict_input_fn()
image = serving_input_receiver.features[fields.InputDataFields.image]
receiver_tensors = serving_input_receiver.receiver_tensors[
inputs.SERVING_FED_EXAMPLE_KEY]
self.assertEqual([1, 300, 300, 3], image.shape.as_list())
self.assertEqual(tf.float32, image.dtype)
self.assertEqual(tf.string, receiver_tensors.dtype)
def test_error_with_bad_train_config(self):
"""Tests that a TypeError is raised with improper train config."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
configs['model'].ssd.num_classes = 37
train_input_fn = inputs.create_train_input_fn(
train_config=configs['eval_config'], # Expecting `TrainConfig`.
train_input_config=configs['train_input_config'],
model_config=configs['model'])
with self.assertRaises(TypeError):
train_input_fn()
def test_error_with_bad_train_input_config(self):
"""Tests that a TypeError is raised with improper train input config."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
configs['model'].ssd.num_classes = 37
train_input_fn = inputs.create_train_input_fn(
train_config=configs['train_config'],
train_input_config=configs['model'], # Expecting `InputReader`.
model_config=configs['model'])
with self.assertRaises(TypeError):
train_input_fn()
def test_error_with_bad_train_model_config(self):
"""Tests that a TypeError is raised with improper train model config."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
configs['model'].ssd.num_classes = 37
train_input_fn = inputs.create_train_input_fn(
train_config=configs['train_config'],
train_input_config=configs['train_input_config'],
model_config=configs['train_config']) # Expecting `DetectionModel`.
with self.assertRaises(TypeError):
train_input_fn()
def test_error_with_bad_eval_config(self):
"""Tests that a TypeError is raised with improper eval config."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
configs['model'].ssd.num_classes = 37
eval_input_fn = inputs.create_eval_input_fn(
eval_config=configs['train_config'], # Expecting `EvalConfig`.
eval_input_config=configs['eval_input_config'],
model_config=configs['model'])
with self.assertRaises(TypeError):
eval_input_fn()
def test_error_with_bad_eval_input_config(self):
"""Tests that a TypeError is raised with improper eval input config."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
configs['model'].ssd.num_classes = 37
eval_input_fn = inputs.create_eval_input_fn(
eval_config=configs['eval_config'],
eval_input_config=configs['model'], # Expecting `InputReader`.
model_config=configs['model'])
with self.assertRaises(TypeError):
eval_input_fn()
def test_error_with_bad_eval_model_config(self):
"""Tests that a TypeError is raised with improper eval model config."""
configs = _get_configs_for_model('ssd_inception_v2_pets')
configs['model'].ssd.num_classes = 37
eval_input_fn = inputs.create_eval_input_fn(
eval_config=configs['eval_config'],
eval_input_config=configs['eval_input_config'],
model_config=configs['eval_config']) # Expecting `DetectionModel`.
with self.assertRaises(TypeError):
eval_input_fn()
class DataAugmentationFnTest(tf.test.TestCase):
def test_apply_image_and_box_augmentation(self):
data_augmentation_options = [
(preprocessor.resize_image, {
'new_height': 20,
'new_width': 20,
'method': tf.image.ResizeMethod.NEAREST_NEIGHBOR
}),
(preprocessor.scale_boxes_to_pixel_coordinates, {}),
]
data_augmentation_fn = functools.partial(
inputs.augment_input_data,
data_augmentation_options=data_augmentation_options)
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
fields.InputDataFields.groundtruth_boxes:
tf.constant(np.array([[.5, .5, 1., 1.]], np.float32))
}
augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
with self.test_session() as sess:
augmented_tensor_dict_out = sess.run(augmented_tensor_dict)
self.assertAllEqual(
augmented_tensor_dict_out[fields.InputDataFields.image].shape,
[20, 20, 3]
)
self.assertAllClose(
augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes],
[[10, 10, 20, 20]]
)
def test_include_masks_in_data_augmentation(self):
data_augmentation_options = [
(preprocessor.resize_image, {
'new_height': 20,
'new_width': 20,
'method': tf.image.ResizeMethod.NEAREST_NEIGHBOR
})
]
data_augmentation_fn = functools.partial(
inputs.augment_input_data,
data_augmentation_options=data_augmentation_options)
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
fields.InputDataFields.groundtruth_instance_masks:
tf.constant(np.zeros([2, 10, 10], np.uint8))
}
augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
with self.test_session() as sess:
augmented_tensor_dict_out = sess.run(augmented_tensor_dict)
self.assertAllEqual(
augmented_tensor_dict_out[fields.InputDataFields.image].shape,
[20, 20, 3])
self.assertAllEqual(augmented_tensor_dict_out[
fields.InputDataFields.groundtruth_instance_masks].shape, [2, 20, 20])
def test_include_keypoints_in_data_augmentation(self):
data_augmentation_options = [
(preprocessor.resize_image, {
'new_height': 20,
'new_width': 20,
'method': tf.image.ResizeMethod.NEAREST_NEIGHBOR
}),
(preprocessor.scale_boxes_to_pixel_coordinates, {}),
]
data_augmentation_fn = functools.partial(
inputs.augment_input_data,
data_augmentation_options=data_augmentation_options)
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
fields.InputDataFields.groundtruth_boxes:
tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)),
fields.InputDataFields.groundtruth_keypoints:
tf.constant(np.array([[[0.5, 1.0], [0.5, 0.5]]], np.float32))
}
augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
with self.test_session() as sess:
augmented_tensor_dict_out = sess.run(augmented_tensor_dict)
self.assertAllEqual(
augmented_tensor_dict_out[fields.InputDataFields.image].shape,
[20, 20, 3]
)
self.assertAllClose(
augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes],
[[10, 10, 20, 20]]
)
self.assertAllClose(
augmented_tensor_dict_out[fields.InputDataFields.groundtruth_keypoints],
[[[10, 20], [10, 10]]]
)
def _fake_model_preprocessor_fn(image):
return (image, tf.expand_dims(tf.shape(image)[1:], axis=0))
def _fake_image_resizer_fn(image, mask):
return (image, mask, tf.shape(image))
class DataTransformationFnTest(tf.test.TestCase):
def test_returns_correct_class_label_encodings(self):
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
fields.InputDataFields.groundtruth_boxes:
tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)),
fields.InputDataFields.groundtruth_classes:
tf.constant(np.array([3, 1], np.int32))
}
num_classes = 3
input_transformation_fn = functools.partial(
inputs.transform_input_data,
model_preprocess_fn=_fake_model_preprocessor_fn,
image_resizer_fn=_fake_image_resizer_fn,
num_classes=num_classes)
with self.test_session() as sess:
transformed_inputs = sess.run(
input_transformation_fn(tensor_dict=tensor_dict))
self.assertAllClose(
transformed_inputs[fields.InputDataFields.groundtruth_classes],
[[0, 0, 1], [1, 0, 0]])
def test_returns_correct_merged_boxes(self):
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
fields.InputDataFields.groundtruth_boxes:
tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], np.float32)),
fields.InputDataFields.groundtruth_classes:
tf.constant(np.array([3, 1], np.int32))
}
num_classes = 3
input_transformation_fn = functools.partial(
inputs.transform_input_data,
model_preprocess_fn=_fake_model_preprocessor_fn,
image_resizer_fn=_fake_image_resizer_fn,
num_classes=num_classes,
merge_multiple_boxes=True)
with self.test_session() as sess:
transformed_inputs = sess.run(
input_transformation_fn(tensor_dict=tensor_dict))
self.assertAllClose(
transformed_inputs[fields.InputDataFields.groundtruth_boxes],
[[.5, .5, 1., 1.]])
self.assertAllClose(
transformed_inputs[fields.InputDataFields.groundtruth_classes],
[[1, 0, 1]])
def test_returns_resized_masks(self):
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
fields.InputDataFields.groundtruth_instance_masks:
tf.constant(np.random.rand(2, 4, 4).astype(np.float32)),
fields.InputDataFields.groundtruth_classes:
tf.constant(np.array([3, 1], np.int32))
}
def fake_image_resizer_fn(image, masks):
resized_image = tf.image.resize_images(image, [8, 8])
resized_masks = tf.transpose(
tf.image.resize_images(tf.transpose(masks, [1, 2, 0]), [8, 8]),
[2, 0, 1])
return resized_image, resized_masks, tf.shape(resized_image)
num_classes = 3
input_transformation_fn = functools.partial(
inputs.transform_input_data,
model_preprocess_fn=_fake_model_preprocessor_fn,
image_resizer_fn=fake_image_resizer_fn,
num_classes=num_classes)
with self.test_session() as sess:
transformed_inputs = sess.run(
input_transformation_fn(tensor_dict=tensor_dict))
self.assertAllEqual(transformed_inputs[
fields.InputDataFields.groundtruth_instance_masks].shape, [2, 8, 8])
def test_applies_model_preprocess_fn_to_image_tensor(self):
np_image = np.random.randint(256, size=(4, 4, 3))
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np_image),
fields.InputDataFields.groundtruth_classes:
tf.constant(np.array([3, 1], np.int32))
}
def fake_model_preprocessor_fn(image):
return (image / 255., tf.expand_dims(tf.shape(image)[1:], axis=0))
num_classes = 3
input_transformation_fn = functools.partial(
inputs.transform_input_data,
model_preprocess_fn=fake_model_preprocessor_fn,
image_resizer_fn=_fake_image_resizer_fn,
num_classes=num_classes)
with self.test_session() as sess:
transformed_inputs = sess.run(
input_transformation_fn(tensor_dict=tensor_dict))
self.assertAllClose(transformed_inputs[fields.InputDataFields.image],
np_image / 255.)
self.assertAllClose(transformed_inputs[fields.InputDataFields.
true_image_shape],
[4, 4, 3])
def test_applies_data_augmentation_fn_to_tensor_dict(self):
np_image = np.random.randint(256, size=(4, 4, 3))
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np_image),
fields.InputDataFields.groundtruth_classes:
tf.constant(np.array([3, 1], np.int32))
}
def add_one_data_augmentation_fn(tensor_dict):
return {key: value + 1 for key, value in tensor_dict.items()}
num_classes = 4
input_transformation_fn = functools.partial(
inputs.transform_input_data,
model_preprocess_fn=_fake_model_preprocessor_fn,
image_resizer_fn=_fake_image_resizer_fn,
num_classes=num_classes,
data_augmentation_fn=add_one_data_augmentation_fn)
with self.test_session() as sess:
augmented_tensor_dict = sess.run(
input_transformation_fn(tensor_dict=tensor_dict))
self.assertAllEqual(augmented_tensor_dict[fields.InputDataFields.image],
np_image + 1)
self.assertAllEqual(
augmented_tensor_dict[fields.InputDataFields.groundtruth_classes],
[[0, 0, 0, 1], [0, 1, 0, 0]])
def test_applies_data_augmentation_fn_before_model_preprocess_fn(self):
np_image = np.random.randint(256, size=(4, 4, 3))
tensor_dict = {
fields.InputDataFields.image:
tf.constant(np_image),
fields.InputDataFields.groundtruth_classes:
tf.constant(np.array([3, 1], np.int32))
}
def mul_two_model_preprocessor_fn(image):
return (image * 2, tf.expand_dims(tf.shape(image)[1:], axis=0))
def add_five_to_image_data_augmentation_fn(tensor_dict):
tensor_dict[fields.InputDataFields.image] += 5
return tensor_dict
num_classes = 4
input_transformation_fn = functools.partial(
inputs.transform_input_data,
model_preprocess_fn=mul_two_model_preprocessor_fn,
image_resizer_fn=_fake_image_resizer_fn,
num_classes=num_classes,
data_augmentation_fn=add_five_to_image_data_augmentation_fn)
with self.test_session() as sess:
augmented_tensor_dict = sess.run(
input_transformation_fn(tensor_dict=tensor_dict))
self.assertAllEqual(augmented_tensor_dict[fields.InputDataFields.image],
(np_image + 5) * 2)
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
] | |
026745467476e61080f1b8483e76fc80ed91ca82 | 8f337d7a1477eb9878bd252f45fadd967ba5dbbe | /run_galfit_disk_only.py | 62c3df5903da86c2f2a4574520757cfb091c1fa8 | [] | no_license | bpRsh/b1_research_lib | bd4c293946329ea96d0fb37d8769aaa83d1ca15d | 1de77f683b3ba18a1ab142b0fe86114c7a67791a | refs/heads/master | 2021-01-15T19:04:32.177465 | 2020-11-23T19:55:34 | 2020-11-23T19:55:34 | 99,805,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,674 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-#
#
# Author : Bhishan Poudel; Physics Graduate Student, Ohio University
# Date : 26-Oct-2016 13:10
# Last update : Dec 15, 2016
# Est time : 3 min for one galaxy one filter.
# Main commands : rm -r imgblock.fits subcomps.fit ; galfit expdisk_devauc.sh
# galfit -o3 galfit.01 && rm -r galfit.01
# ds9 -multiframe imgblock.fits subcomps.fits &
# Imports
from __future__ import division, unicode_literals, print_function
import subprocess
import os
import time
from string import ascii_uppercase
import astropy.io
from astropy.io import fits
from astropy.io.fits import getdata
from astropy.io.fits import getheader
from astropy.io.fits import getval
paramfile = r'expdisk_devauc.sh'
def replace_galfit_param(name, value, object_num=1, fixed=True):
"""Replace input galfit parameter file with new configuration.
Arguments:
name : parameter name, e.g. A-P, 1-10, 'Z'
value: new value for the parameter in string form. e.g. '20.0'
object_num: For A-Z object_num is 1
For objects, object_num starts from 1.
fixed: True means parameter will be fixed (0) during fitting.
NOTE: Keep fixed = False while using this function to vary the parameter.
"""
name, value = str(name), str(value)
with open(paramfile) as f:
gf_file = f.readlines()
# Location of param.
# 3rd column is where one can hold the parameters fixed (0) or allow vary 1
loc = [i for i in range(len(gf_file)) if
gf_file[i].strip().startswith(name + ')')][object_num - 1]
param_str = gf_file[loc]
comment = param_str.find('#')
if name in ascii_uppercase:
fmt = '{}) {} {}'
param_str = fmt.format(name, value, param_str[comment:])
else:
fmt = '{}) {} {} {}'
param_str = fmt.format(name, value, '0' if fixed else '1',
param_str[comment:])
gf_file[loc] = param_str
with open(paramfile, 'w') as f:
f.writelines(gf_file)
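# Usage sketch for replace_galfit_param (values illustrative, mirroring the
# calls made in run_galfit below): point entry A at an input image and let
# the magnitude (parameter 3) of object 2 vary during the fit.
#
#   replace_galfit_param('A', 'f606w_gal0.fits', object_num=1, fixed=False)
#   replace_galfit_param('3', '20.0', object_num=2, fixed=False)
#
# A-Z entries carry no fix/vary column, so `fixed` only affects the numbered
# per-object parameters.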
def run_galfit(galaxy, outdir, count):
"""Run galfit on the input galaxy and create model and residual images.
    Runs galfit on the given input galaxy and creates model
    and residual images in the output directory.
    galaxy : base name of input galaxy, e.g. f606w or f814w
outdir : output directory, e.g. galfit_outputs
count : count number of galaxy, e.g. 0 for f606w_gal0.fits
Needs : galfit_outputs/two_components/bulge/
galfit_outputs/two_components/disk/
galfit_outputs/two_components/residual/
    Note: 1. This program will also read the values of mag and rad from the
          input fitsfile header, and update them in the
          galfit paramfile 'expdisk_devauc.sh'.
          2. It will also create the mask file using the ic command.
"""
# galaxy = f606w or f814w
# path = '/Users/poudel/jedisim/simdatabase/colors'
path = '/Users/poudel/jedisim/simdatabase/galaxies'
ingal = path + '/' + galaxy + '_gal' + str(count) + '.fits'
psf = galaxy + '_psf.fits' # psf in the script directory
# get the value of magnitude, radius and mag0 of input galaxy
    try:
        mag = getval(ingal, 'MAG')
    except KeyError:
        # fall back to a default magnitude if the header keyword is missing
        mag = 20.0
    try:
        rad = getval(ingal, 'RADIUS')
    except KeyError:
        # fall back to a default radius if the header keyword is missing
        rad = 10.0
mag0 = getval(ingal, 'MAG0')
# create galfit paramfile according to the input galaxy
# For A-Z object_num is 1
# fixed=True means it is fixed and not changed
print("\n\n\n")
print('+' * 80)
print('+' * 80)
print('+' * 80)
print('{} {} {}'.format('Current Galaxy : ', ingal, ''))
print('+' * 80)
print('+' * 80)
print('+' * 80)
replace_galfit_param('A', ingal, object_num=1, fixed=False)
replace_galfit_param('D', psf, object_num=1, fixed=False)
replace_galfit_param('J', mag0, object_num=1, fixed=False)
replace_galfit_param('3', mag, object_num=1, fixed=False)
replace_galfit_param('4', rad, object_num=1, fixed=False)
replace_galfit_param('3', mag, object_num=2, fixed=False)
replace_galfit_param('4', rad, object_num=2, fixed=False)
# create mask file according to the input galaxy
cmd = "ic '1 0 %1 0 == ?' " + ingal + " > mask.fits"
subprocess.call(cmd, shell=True)
# For objects, object_num starts from 1
# 1 = expdisk, 2 = devauc
# run galfit
# rm -r imgblock.fits subcomps.fits galfit.01 # removes these files.
# galfit sim.feedme # gives galfit.01, imgblock.fits,if succeed.
# galfit -o3 galfit.01 # runs only when galfit.01 exists
# we can delete galfit.01 immediately after it it used.
cmd1 = 'rm -r imgblock.fits; galfit ' + paramfile
cmd2 = 'rm -r subcomps.fits; galfit -o3 galfit.01; rm -r galfit.01'
print("\n\n\n")
print('*' * 80)
print('Running: {}'.format(cmd1))
print('*' * 80)
subprocess.call(cmd1, shell=True) # gives galfit.01 if succeed
if os.path.exists('galfit.01'):
print("\n\n\n")
print('!' * 80)
print('Running: {}'.format(cmd2))
print('!' * 80)
subprocess.call(cmd2, shell=True)
# get residual map from imgblock.fits
# residual = outdir + '/residual/' + galaxy + '_res' + str(count) + '.fits'
# get devauc and expdisk models from subcomps.fits
# galaxy = f606w or f814w
# devauc = bulge and expdisk+residual = disk
# devauc = outdir + '/bulge/' + galaxy + '_bulge' + str(count) + '.fits'
expdisk = outdir + galaxy + '_disk' +\
str(count) + '.fits'
# extracting frames of imgblock.fits and subcomps.fits if they exists.
if os.path.isfile('subcomps.fits') and os.path.isfile('imgblock.fits'):
# for imgblock.fits : 0 is empty, 1 is input, 2 is model, 3 is residual
# dat_res, hdr_res = fits.getdata(r'imgblock.fits', ext=3, header=True)
# for subcomps.fits: 0 is input, 1 is expdisk, 2 is devauc etc.
dat_exp, hdr_exp = fits.getdata(r'subcomps.fits', ext=1, header=True)
# dat_dev, hdr_dev = fits.getdata(r'subcomps.fits', ext=2, header=True)
# fits.writeto(expdisk, dat_exp, hdr_exp, clobber=False)
# fits.writeto(residual, dat_res, hdr_res, clobber=True)
# fits.writeto(devauc, dat_dev, hdr_dev, clobber=True)
fits.writeto(expdisk, dat_exp, hdr_exp, clobber=True)
# print('{} {} {}'.format('Output file: ', expdisk, ''))
# print('{} {} {}'.format('Output file: ', residual, ''))
# print('{} {} {}'.format('Output file: ', devauc, ''))
print('{} {} {}'.format('Output file: ', expdisk, ''))
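# Minimal driver sketch (assumes the galfit binary and the 'ic' tool are on
# PATH, and that the hard-coded input path inside run_galfit exists):
#
#   run_galfit('f606w', 'disk_only_280_301/', 280)
#
# which writes disk_only_280_301/f606w_disk280.fits if the fit converges.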
def main():
"""Main program."""
# output directory without '/' in the end
# range is from 0 to 101 and both f606w and f814w
galfit_outdir = 'disk_only_280_301/'
# there are 302 galaxies for each filter
# for count in list(range(101, 303)):
for count in range(280, 301):
run_galfit('f606w', galfit_outdir, count)
run_galfit('f814w', galfit_outdir, count)
if __name__ == '__main__':
# beginning time
program_begin_time = time.time()
begin_ctime = time.ctime()
# run main program
main()
# print the time taken
program_end_time = time.time()
end_ctime = time.ctime()
seconds = program_end_time - program_begin_time
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
d, h = divmod(h, 24)
print('\nBegin time: ', begin_ctime)
print('End time: ', end_ctime, '\n')
print("Time taken: {0:.0f} days, {1:.0f} hours, \
{2:.0f} minutes, {3:f} seconds.".format(d, h, m, s))
| [
"[email protected]"
] | |
44f6551cecf87e0cc64db8a41ab7784033adc958 | 586e60b4bbf80e3da9c1051182a42cb81bb2ea1b | /scripts/generate-demo-users.py | 787052a0fab94bece1059cc3565abb512a20e0bd | [
"Apache-2.0"
] | permissive | DD-DeCaF/caffeine-bootstrap | daa0cb844fd694b87430451baee664d816e366a7 | ec65cd5f135f86c7bf2faeb96930637e910c380f | refs/heads/master | 2021-07-09T15:18:56.476754 | 2020-08-18T11:16:37 | 2020-08-18T11:16:37 | 161,489,310 | 1 | 0 | Apache-2.0 | 2020-08-18T11:16:38 | 2018-12-12T13:03:41 | Shell | UTF-8 | Python | false | false | 409 | py | from iam.models import User, db
from iam.app import app, init_app
init_app(app, db)
app.app_context().push()
print("Adding user: demo@demo")
user = User(email="demo@demo")
user.set_password("demo")
db.session.add(user)
for i in range(40):
print(f"Adding user: demo{i}@demo (password demo)")
user = User(email=f"demo{i}@demo")
user.set_password("demo")
db.session.add(user)
db.session.commit()
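# Optional sanity check (a sketch only: it assumes the iam app's User model
# exposes the usual Flask-SQLAlchemy query API, which this script itself
# never uses above):
#
#   assert User.query.filter(User.email.endswith('@demo')).count() == 41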
| [
"[email protected]"
] | |
a562ea5925bb853287c30692e331db3ad17821e2 | 8c42964a29af1d5a2f4541ab634b54e25a90b9f4 | /Example2/configuration.py | 5a64a7d9aada01e4a7e1e383119cbc7d566d617f | [] | no_license | lenzip/CMSDataAnalysisSchoolPisa2019ScalarToWW | a21dc572ae2e152410a867ae5013703c886e4bbf | 8cff1dea08887b78a9efc26a142609ba1b7ba296 | refs/heads/master | 2020-04-14T21:13:03.028961 | 2019-01-23T16:22:23 | 2019-01-23T16:22:23 | 164,121,564 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 803 | py | # example of configuration file
tag = 'Inclusive'
# used by mkShape to define output directory for root files
outputDir = 'rootFile'
# file with list of variables
variablesFile = 'variables.py'
# file with list of cuts
cutsFile = 'cuts.py'
# file with list of samples
samplesFile = 'samples.py'
# file with list of plots
plotFile = 'plot.py'
# luminosity to normalize to (in 1/fb)
lumi = 35.867
# used by mkPlot to define output directory for plots
# different from "outputDir" to do things more tidy
outputDirPlots = 'plotsInclusive'
# used by mkDatacards to define output directory for datacards
outputDirDatacard = 'datacardsInclusive'
# structure file for datacard
structureFile = 'structure.py'
# nuisances file for mkDatacards and for mkShape
nuisancesFile = 'nuisances.py'
| [
"[email protected]"
] | |
4dcb1a63e7effceb8e87d2579849844a5dcaecbe | d9eb21a408a449918ed431f760b6a61292869de6 | /Workshops/custom_list/test_custom_list.py | ba4bb2591f76ff8946987ea4c1a7891db8355939 | [] | no_license | zhyordanova/Python-OOP | 5c73ab851848c969beb50b774b67bc9e4c102610 | aad42e108b676de119ac99bef632b76ac595d49a | refs/heads/main | 2023-05-27T06:09:23.524422 | 2021-05-06T22:00:18 | 2021-05-06T22:00:18 | 349,583,825 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,477 | py | from unittest import TestCase
from lists.custom_list import ArrayList
class TestArrayList(TestCase):
def setUp(self):
self.al = ArrayList()
def test_append__when_list_is_empty__expect_append_to_the_end(self):
self.al.append(1)
values = list(self.al)
self.assertEqual([1], values)
def test_append__expect_to_return_the_list(self):
result = self.al.append(1)
self.assertEqual(self.al, result)
def test_append__when_list_not_empty__expect_append_to_the_end(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
values = list(self.al)
self.assertEqual([1, 2, 3], values)
def test_append__1024_values__expect_append_to_the_end(self):
values = [x for x in range(1024)]
[self.al.append(x) for x in values]
list_value = list(self.al)
self.assertEqual(values, list_value)
def test_append__expect_to_increase_size(self):
self.al.append(1)
self.assertEqual(1, self.al.size())
def test_remove__when_index_is_valid__expect_remove_value_and_return_it(self):
self.al.append(1)
self.al.append(2)
self.al.append(333)
self.al.append(4)
result = self.al.remove(2)
self.assertEqual([1, 2, 4], list(self.al))
self.assertEqual(333, result)
def test_remove__when_index_is_invalid__expect_to_raise(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
self.al.append(4)
with self.assertRaises(IndexError):
self.al.remove(self.al.size())
def test_get__when_index_is_valid__expect_to_return_it(self):
self.al.append(1)
self.al.append(2)
self.al.append(333)
self.al.append(4)
result = self.al.get(2)
self.assertEqual(333, result)
def test_get__when_index_is_invalid__expect_to_raise(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
self.al.append(4)
with self.assertRaises(IndexError):
self.al.get(self.al.size())
    def test_extend_with_empty_iterable__expect_to_be_same(self):
self.al.append(1)
self.al.extend([])
self.assertEqual([1], list(self.al))
    def test_extend_with_list_iterable__expect_to_append_the_list(self):
self.al.append(1)
self.al.extend([2])
self.assertEqual([1, 2], list(self.al))
    def test_extend_with_generator__expect_to_append_the_list(self):
self.al.append(1)
self.al.extend((x for x in range(1)))
self.assertEqual([1, 0], list(self.al))
    def test_extend_with_nonempty_list__expect_to_append_to_list(self):
self.al.append(1)
self.al.extend([1])
self.assertEqual([1, 1], list(self.al))
    def test_extend_with_non_iterable__expect_to_raise(self):
self.al.append(1)
with self.assertRaises(ValueError):
self.al.extend(2)
def test_insert__when_index_is_valid__expect_to_place_value_at_index(self):
self.al.append(1)
self.al.append(2)
self.al.append(4)
self.al.append(5)
self.al.append(6)
self.al.append(7)
self.al.append(8)
self.al.append(9)
self.al.insert(2, 333)
self.assertEqual([1, 2, 333, 4, 5, 6, 7, 8, 9], list(self.al))
def test_insert__when_index_is_invalid__expect_to_raise(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
with self.assertRaises(IndexError):
self.al.insert(self.al.size() + 1, 2)
def test_pop__expect_to_remove_last_element_and_return_it(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
self.al.append(4)
result = self.al.pop()
self.assertEqual(4, result)
self.assertEqual([1, 2, 3], list(self.al))
def test_pop__when_empty__expect_to_raise(self):
with self.assertRaises(IndexError):
self.al.pop()
def test_clear__expect_to_be_empty(self):
[self.al.append(x) for x in range(15)]
self.al.clear()
self.assertEqual([], list(self.al))
def test_index__when_item_is_present__expect_return_correct_index(self):
[self.al.append(x) for x in range(15)]
index = self.al.index(5)
self.assertEqual(5, index)
def test_index__when_item_is_not_present__expect_raise(self):
[self.al.append(x) for x in range(15)]
with self.assertRaises(ValueError):
self.al.index(17)
def test_count__when_item_is_present_one_time__expected_to_return_1(self):
[self.al.append(x) for x in range(15)]
expected_count = 1
actual_count = self.al.count(5)
self.assertEqual(expected_count, actual_count)
def test_count__when_item_is_present_multiple_times__expected_to_return_correct_count(self):
[self.al.append(x) for x in range(15)]
self.al.append(5)
self.al.insert(3, 5)
self.al.insert(7, 5)
self.al.insert(1, 5)
self.al.insert(9, 5)
expected_count = 6
actual_count = self.al.count(5)
self.assertEqual(expected_count, actual_count)
def test_count__when_item_is_present_multiple_times_and_once_poped__expected_to_return_correct_count(self):
[self.al.append(x) for x in range(15)]
self.al.insert(3, 5)
self.al.insert(7, 5)
self.al.insert(1, 5)
self.al.insert(9, 5)
self.al.append(5)
self.al.pop()
expected_count = 5
actual_count = self.al.count(5)
self.assertEqual(expected_count, actual_count)
def test_count__when_item_is_not_present__expected_to_return_0(self):
[self.al.append(x) for x in range(15)]
expected_count = 0
actual_count = self.al.count(55)
self.assertEqual(expected_count, actual_count)
def test_reversed__expect_in_reversed_order(self):
[self.al.append(x) for x in range(5)]
expected = [x for x in range(4, -1, -1)]
actual = self.al.reverse()
self.assertEqual(expected, actual)
def test_copy__expect_to_return_another_list_with_same_value(self):
[self.al.append(x) for x in range(5)]
copied_list = self.al.copy()
expected_result = [x for x in range(5)]
actual_result = list(copied_list)
self.assertNotEqual(copied_list, self.al)
self.assertEqual(expected_result, actual_result)
def test_add_first__when_empty__expect_to_add(self):
self.al.add_first(1)
self.assertListEqual([1], list(self.al))
def test_add_first__when_non_empty__expect_to_add(self):
[self.al.append(x) for x in range(5)]
self.al.add_first(1)
self.assertListEqual([1, 0, 1, 2, 3, 4], list(self.al))
def test_dictionize__when_empty__expect_dict(self):
expected = {}
actual = self.al.dictionize()
self.assertEqual(expected, actual)
    def test_dictionize__when_even_elements_count__expect_correct_result(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
self.al.append(4)
expected = {
1: 2,
3: 4,
}
actual = self.al.dictionize()
self.assertEqual(expected, actual)
    def test_dictionize__when_odd_elements_count__expect_correct_result(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
self.al.append(4)
self.al.append(5)
expected = {
1: 2,
3: 4,
5: ' ',
}
actual = self.al.dictionize()
self.assertEqual(expected, actual)
def test_move_list_empty__expect_to_move_nothing(self):
self.al.move(1)
self.assertEqual([], list(self.al))
def test_move__when_moving_1_element__expect_to_move_1_element(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
self.al.append(4)
self.al.move(1)
self.assertEqual([2, 3, 4, 1], list(self.al))
def test_move__when_moving_3_elements__expect_to_move_3_elements(self):
self.al.append(1)
self.al.append(2)
self.al.append(3)
self.al.append(4)
self.al.move(3)
self.assertEqual([4, 1, 2, 3], list(self.al))
    def test_move__when_moving_3_values_and_have_2_values__expect_to_move_3_values_from_the_start_to_the_end(self):
self.al.append(1)
self.al.append(2)
self.al.move(3)
self.assertEqual([2, 1], list(self.al))
def test_sum__when_values__expected_to_return_correct_sum(self):
self.al.append(1)
self.al.append('2')
self.al.append(3)
expected = 5
actual = self.al.sum()
self.assertEqual(expected, actual)
def test_sum__when_empty_expected_to_return_0(self):
self.assertEqual(0, self.al.sum())
    def test_overbound__expect_to_return_max_value(self):
values = [x for x in range(15)]
[self.al.append(x) for x in values]
expected = max(values)
actual = self.al.overbound()
self.assertEqual(expected, actual)
def test_underbound__expect_to_return_min_value(self):
values = [x for x in range(15)]
[self.al.append(x) for x in values]
expected = min(values)
actual = self.al.underbound()
self.assertEqual(expected, actual)
| [
"[email protected]"
] | |
30d076a33b413db6d98a89853257711172247372 | 60f067710243089ea5a09c676f8092232904ed40 | /ltp/task_segmention.py | bfd04d9af9f685de08e23778fb8c48e4e00e5b95 | [] | no_license | liyang-2401/ltp | cfc5386fe9cebc78f828431b1c04d8288d450678 | 5d26093f2e2bbec76a892dd25e206d9e7dacc13e | refs/heads/master | 2023-01-22T14:43:16.871839 | 2020-12-04T08:00:23 | 2020-12-04T08:00:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,058 | py | import types
import numpy
import torch
import torch.utils.data
import os
from tqdm import tqdm
from argparse import ArgumentParser
from ltp.data import dataset as datasets
from ltp import optimization
from ltp.data.utils import collate
from seqeval.metrics import f1_score
from ltp.transformer_linear import TransformerLinear as Model
import pytorch_lightning as pl
from pytorch_lightning import Trainer
from transformers import AutoTokenizer
from ltp.utils import TaskInfo, common_train, map2device, convert2npy
os.environ['TOKENIZERS_PARALLELISM'] = 'true'
task_info = TaskInfo(task_name='seg', metric_name='f1')
# CUDA_VISIBLE_DEVICES=0 PYTHONPATH=. python ltp/task_segmention.py --data_dir=data/seg --num_labels=2 --max_epochs=10 --batch_size=16 --gpus=1 --precision=16 --auto_lr_find=lr
def build_dataset(model, data_dir):
dataset = datasets.load_dataset(
datasets.Conllu,
data_dir=data_dir,
cache_dir=data_dir
)
dataset.remove_columns_(["id", "lemma", "upos", "xpos", "feats", "head", "deprel", "deps", "misc"])
tokenizer = AutoTokenizer.from_pretrained(model.hparams.transformer, use_fast=True)
    # word-boundary labels per word piece: {'B': 1, 'I': 0}
def tokenize(examples):
res = tokenizer(
examples['form'],
is_split_into_words=True,
max_length=model.transformer.config.max_position_embeddings,
truncation=True
)
labels = []
for encoding in res.encodings:
labels.append([])
last_word_idx = -1
for word_idx in encoding.words[1:-1]:
labels[-1].append(int(word_idx != last_word_idx))
last_word_idx = word_idx
res['labels'] = labels
return res
dataset = dataset.map(
lambda examples: tokenize(examples), batched=True,
cache_file_names={
k: d._get_cache_file_path(f"{task_info.task_name}-{k}-tokenized") for k, d in dataset.items()
}
)
dataset.set_format(type='torch', columns=['input_ids', 'token_type_ids', 'attention_mask', 'labels'])
dataset.shuffle(
indices_cache_file_names={
k: d._get_cache_file_path(f"{task_info.task_name}-{k}-shuffled-index-{model.hparams.seed}") for k, d in
dataset.items()
}
)
return dataset, f1_score
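# Labeling illustration (hypothetical word pieces, not a real tokenizer run):
# every first piece of a word gets 1 ('B') and every continuation gets 0
# ('I'), so two words tokenized into pieces with word ids [0, 0, 1] yield the
# label row [1, 0, 1].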
def validation_method(metric, loss_tag='val_loss', metric_tag=f'val_{task_info.metric_name}', log=True):
label_mapper = ['I-W', 'B-W']
def step(self: pl.LightningModule, batch, batch_nb):
loss, logits = self(**batch)
mask = batch['attention_mask'][:, 2:] != 1
# acc
labels = batch['labels']
preds = torch.argmax(logits, dim=-1)
labels[mask] = -1
preds[mask] = -1
labels = [[label_mapper[word] for word in sent if word != -1] for sent in labels.detach().cpu().numpy()]
preds = [[label_mapper[word] for word in sent if word != -1] for sent in preds.detach().cpu().numpy()]
return {'loss': loss.item(), 'pred': preds, 'labels': labels}
def epoch_end(self: pl.LightningModule, outputs):
if isinstance(outputs, dict):
outputs = [outputs]
length = len(outputs)
loss = sum([output['loss'] for output in outputs]) / length
preds = sum([output['pred'] for output in outputs], [])
labels = sum([output['labels'] for output in outputs], [])
f1 = metric(preds, labels)
if log:
self.log_dict(
dictionary={loss_tag: loss, metric_tag: f1},
on_step=False, on_epoch=True, prog_bar=True, logger=True
)
else:
return f1
return step, epoch_end
def build_method(model):
dataset, metric = build_dataset(model, model.hparams.data_dir)
def train_dataloader(self):
res = torch.utils.data.DataLoader(
dataset[datasets.Split.TRAIN],
batch_size=self.hparams.batch_size,
collate_fn=collate,
num_workers=self.hparams.num_workers,
pin_memory=True
)
return res
def training_step(self, batch, batch_nb):
loss, logits = self(**batch)
self.log("loss", loss.item())
return loss
def val_dataloader(self):
return torch.utils.data.DataLoader(
dataset[datasets.Split.VALIDATION],
batch_size=self.hparams.batch_size,
collate_fn=collate,
num_workers=self.hparams.num_workers,
pin_memory=True
)
def test_dataloader(self):
return torch.utils.data.DataLoader(
dataset[datasets.Split.TEST],
batch_size=self.hparams.batch_size,
collate_fn=collate,
num_workers=self.hparams.num_workers,
pin_memory=True
)
# AdamW + LR scheduler
def configure_optimizers(self: Model):
num_epoch_steps = (len(dataset[datasets.Split.TRAIN]) + self.hparams.batch_size - 1) // self.hparams.batch_size
num_train_steps = num_epoch_steps * self.hparams.max_epochs
optimizer, scheduler = optimization.create_optimizer(
self,
lr=self.hparams.lr,
num_train_steps=num_train_steps,
weight_decay=self.hparams.weight_decay,
warmup_steps=self.hparams.warmup_steps,
warmup_proportion=self.hparams.warmup_proportion,
layerwise_lr_decay_power=self.hparams.layerwise_lr_decay_power,
n_transformer_layers=self.transformer.config.num_hidden_layers,
lr_scheduler=optimization.get_polynomial_decay_schedule_with_warmup,
lr_scheduler_kwargs={
'lr_end': self.hparams.lr_end,
'power': self.hparams.lr_decay_power
}
)
return [optimizer], [{'scheduler': scheduler, 'interval': 'step'}]
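    # Worked example for the scheduling arithmetic above (illustrative
    # numbers): with 10,000 training samples, batch_size=16 and
    # max_epochs=10, num_epoch_steps = (10000 + 15) // 16 = 625, so the
    # polynomial-decay scheduler is stretched over 6,250 steps.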
model.configure_optimizers = types.MethodType(configure_optimizers, model)
model.train_dataloader = types.MethodType(train_dataloader, model)
model.training_step = types.MethodType(training_step, model)
# model.training_epoch_end = types.MethodType(training_epoch_end, model)
validation_step, validation_epoch_end = validation_method(
metric, loss_tag='val_loss', metric_tag=f'val_{task_info.metric_name}'
)
model.val_dataloader = types.MethodType(val_dataloader, model)
model.validation_step = types.MethodType(validation_step, model)
model.validation_epoch_end = types.MethodType(validation_epoch_end, model)
test_step, test_epoch_end = validation_method(
metric, loss_tag='test_loss', metric_tag=f'test_{task_info.metric_name}'
)
model.test_dataloader = types.MethodType(test_dataloader, model)
model.test_step = types.MethodType(test_step, model)
model.test_epoch_end = types.MethodType(test_epoch_end, model)
def add_task_specific_args(parent_parser):
parser = ArgumentParser(parents=[parent_parser], add_help=False)
parser.add_argument('--seed', type=int, default=19980524)
parser.add_argument('--batch_size', type=int, default=16)
parser.add_argument('--num_workers', type=int, default=4)
parser.add_argument('--data_dir', type=str, required=True)
parser.add_argument('--build_dataset', action='store_true')
return parser
def build_distill_dataset(args):
model = Model.load_from_checkpoint(
args.resume_from_checkpoint, hparams=args
)
model.eval()
model.freeze()
dataset, metric = build_dataset(model, args.data_dir)
train_dataloader = torch.utils.data.DataLoader(
dataset[datasets.Split.TRAIN],
batch_size=args.batch_size,
collate_fn=collate,
num_workers=args.num_workers
)
    output = os.path.join(args.data_dir, 'output.pt')  # numpy.savez appends '.npz' to this name
if torch.cuda.is_available():
model.cuda()
map2cpu = lambda x: map2device(x)
map2cuda = lambda x: map2device(x, model.device)
else:
map2cpu = lambda x: x
map2cuda = lambda x: x
with torch.no_grad():
        batches = []
        for batch in tqdm(train_dataloader):
            batch = map2cuda(batch)
            loss, logits = model(**batch)
            batch.update(logits=logits)
            batches.append(map2cpu(batch))
        numpy.savez(output, data=convert2npy(batches))
print("Done")
def main():
parser = ArgumentParser()
# add task level args
parser = add_task_specific_args(parser)
# add model specific args
parser = Model.add_model_specific_args(parser)
parser = optimization.add_optimizer_specific_args(parser)
parser = Trainer.add_argparse_args(parser)
# set task specific args
parser.set_defaults(num_labels=2)
args = parser.parse_args()
if args.build_dataset:
build_distill_dataset(args)
else:
common_train(
args,
metric=f'val_{task_info.metric_name}',
model_class=Model,
build_method=build_method,
task=task_info.task_name
)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
3c7c5139a5cd6dd8e33834de89b98fdd8bba4a33 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/length_20200529113854.py | 76b776e2932e64a11975284ff9a772f9332ca676 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | def removeDuplicates(nums):
i = 0
while i <len(nums):
print(nums[i])
if nums[i] == nums[i+1]:
nums.remove(nums[i])
else:
nums.add(nums[i])
# for i in range(length):
# print('i--------->',i)
# for j in range(i+1,length):
# print('j----->',j)
removeDuplicates([1,1,2])
| [
"[email protected]"
] | |
8097d71b8ebae32d7fdc01e7873b5ee6d6ad0fb4 | c01ab71f681efdeb9f4e7d52ed083745b6d42590 | /old/6th sem/cpp/TRIKA/test_modules/testCases.py | 96b35814c7b3c3e9a1a25b8848bf226225f18b05 | [] | no_license | anant-pushkar/competetive_programming_codes | 398a39c85a761c8d242f42f368933239a438ac06 | 127c67d7d4e2cef2d1f25189b6535606f4523af6 | refs/heads/master | 2021-01-20T11:57:07.528790 | 2014-11-14T08:29:21 | 2014-11-14T08:29:21 | 23,577,655 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | import testTemplate
'''number of test suites'''
nSuites=1
def getTests():
tests = []
suite=testTemplate.testSuite("Sample 1")
testcase = testTemplate.testInstance("4 4\n1 1\n100 55 10 2\n20 10 90 1\n60 20 22 4\n1 30 70 5" , "Y 23" , "")
suite.add(testcase)
tests.append(suite)
suite=testTemplate.testSuite("Sample 2")
testcase = testTemplate.testInstance("2 2\n1 1\n1 55 \n20 10 " , "N" , "")
suite.add(testcase)
tests.append(suite)
return tests
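# Consumption sketch -- only the testTemplate calls visible above are real;
# the runner loop itself is an assumption about how the harness uses this:
#
#   for suite in getTests():
#       pass  # each suite holds testInstance objects added via suite.add()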
| [
"[email protected]"
] | |
30980eca76f9208b779a5f3c5e0e65affab9eb1c | 5e4897b32cd19d145cefc4451ced910313cde0bb | /sphinxextra/phpdomain.py | 4380ed3ab6981611f85ff56abfe6880149f92879 | [] | no_license | Tinkerforge/doc | 7e87edcf8d8b67d1edce749c4a3106f431a77585 | 19e49bad70fbe644aa9b4af4d64f99aa0cf71d7f | refs/heads/master | 2023-08-20T22:10:37.363910 | 2023-08-17T13:33:28 | 2023-08-17T13:33:28 | 2,262,338 | 6 | 8 | null | 2023-07-24T13:46:27 | 2011-08-24T15:21:34 | Python | UTF-8 | Python | false | false | 34,922 | py | # -*- coding: utf-8 -*-
"""
sphinx.domains.php
~~~~~~~~~~~~~~~~~~
The PHP language domain.
:copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from copy import deepcopy
from docutils import nodes
from sphinx import addnodes
from sphinx.roles import XRefRole
from sphinx.locale import l_, _
from sphinx.domains import Domain, ObjType
from sphinx.directives import ObjectDescription
from sphinx.util.nodes import make_refnode
from sphinx.util.compat import Directive
from sphinx.util.docfields import TypedField
from sphinxextra.utils import fixup_index_entry
# Olaf: add [\[\]]*, remove \b to allow java arrays, add \. to allow Class1.Class2
#_identifier_re = re.compile(r'\b(~?[a-zA-Z_][a-zA-Z0-9_]*)\b')
_identifier_re = re.compile(r'\$?\b(~?[a-zA-Z_\$][a-zA-Z0-9_\.]*[\[\]]*)')
_whitespace_re = re.compile(r'\s+(?u)')
_string_re = re.compile(r"[LuU8]?('([^'\\]*(?:\\.[^'\\]*)*)'"
r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S)
_visibility_re = re.compile(r'\b(public|private|protected)\b')
_operator_re = re.compile(r'''(?x)
\[\s*\]
| \(\s*\)
| [!<>=/*%+|&^-]=?
| \+\+ | --
| (<<|>>)=? | ~ | && | \| | \|\|
| ->\*? | \,
''')
_id_shortwords = {
'char': 'c',
'signed char': 'c',
'unsigned char': 'C',
'int': 'i',
'signed int': 'i',
'unsigned int': 'U',
'long': 'l',
'signed long': 'l',
'unsigned long': 'L',
'bool': 'b',
'size_t': 's',
'std::string': 'ss',
'std::ostream': 'os',
'std::istream': 'is',
'std::iostream': 'ios',
'std::vector': 'v',
'std::map': 'm',
'operator[]': 'subscript-operator',
'operator()': 'call-operator',
'operator!': 'not-operator',
'operator<': 'lt-operator',
'operator<=': 'lte-operator',
'operator>': 'gt-operator',
'operator>=': 'gte-operator',
'operator=': 'assign-operator',
'operator/': 'div-operator',
'operator*': 'mul-operator',
'operator%': 'mod-operator',
'operator+': 'add-operator',
'operator-': 'sub-operator',
'operator|': 'or-operator',
'operator&': 'and-operator',
'operator^': 'xor-operator',
'operator&&': 'sand-operator',
'operator||': 'sor-operator',
'operator==': 'eq-operator',
'operator!=': 'neq-operator',
'operator<<': 'lshift-operator',
'operator>>': 'rshift-operator',
'operator-=': 'sub-assign-operator',
'operator+=': 'add-assign-operator',
'operator*-': 'mul-assign-operator',
'operator/=': 'div-assign-operator',
'operator%=': 'mod-assign-operator',
'operator&=': 'and-assign-operator',
'operator|=': 'or-assign-operator',
'operator<<=': 'lshift-assign-operator',
'operator>>=': 'rshift-assign-operator',
'operator^=': 'xor-assign-operator',
'operator,': 'comma-operator',
'operator->': 'pointer-operator',
'operator->*': 'pointer-by-pointer-operator',
'operator~': 'inv-operator',
'operator++': 'inc-operator',
'operator--': 'dec-operator',
'operator new': 'new-operator',
'operator new[]': 'new-array-operator',
'operator delete': 'delete-operator',
'operator delete[]': 'delete-array-operator'
}
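# These shortwords keep generated ids compact: NameDefExpr.get_id below maps
# 'operator[]' to 'subscript-operator', and a 'std::string' path id collapses
# to 'ss' via this table.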
class DefinitionError(Exception):
def __init__(self, description):
self.description = description
def __unicode__(self):
return self.description
def __str__(self):
        return unicode(self).encode('utf-8')
class DefExpr(object):
def __unicode__(self):
raise NotImplementedError()
def __eq__(self, other):
if type(self) is not type(other):
return False
try:
for key, value in self.__dict__.iteritems():
if value != getattr(other, value):
return False
except AttributeError:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def clone(self):
"""Close a definition expression node"""
return deepcopy(self)
def get_id(self):
"""Returns the id for the node"""
return u''
def get_name(self):
"""Returns the name. Returns either `None` or a node with
a name you might call :meth:`split_owner` on.
"""
return None
def split_owner(self):
"""Nodes returned by :meth:`get_name` can split off their
owning parent. This function returns the owner and the
name as a tuple of two items. If a node does not support
it, :exc:`NotImplementedError` is raised.
"""
raise NotImplementedError()
def prefix(self, prefix):
"""Prefixes a name node (a node returned by :meth:`get_name`)."""
raise NotImplementedError()
def __str__(self):
return unicode(self).encode('utf-8')
def __repr__(self):
return '<defexpr %s>' % self
class PrimaryDefExpr(DefExpr):
def get_name(self):
return self
def split_owner(self):
return None, self
def prefix(self, prefix):
if isinstance(prefix, PathDefExpr):
prefix = prefix.clone()
prefix.path.append(self)
return prefix
return PathDefExpr([prefix, self])
class NameDefExpr(PrimaryDefExpr):
def __init__(self, name):
self.name = name
def get_id(self):
name = _id_shortwords.get(self.name)
if name is not None:
return name
return self.name.replace(u' ', u'-')
def __unicode__(self):
return unicode(self.name)
class PathDefExpr(PrimaryDefExpr):
def __init__(self, parts):
self.path = parts
def get_id(self):
rv = u'::'.join(x.get_id() for x in self.path)
return _id_shortwords.get(rv, rv)
def split_owner(self):
if len(self.path) > 1:
return PathDefExpr(self.path[:-1]), self.path[-1]
return None, self
def prefix(self, prefix):
if isinstance(prefix, PathDefExpr):
prefix = prefix.clone()
prefix.path.extend(self.path)
return prefix
return PathDefExpr([prefix] + self.path)
def __unicode__(self):
return u'::'.join(map(unicode, self.path))
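# split_owner illustration: parsing "Foo::bar" yields
# PathDefExpr([NameDefExpr('Foo'), NameDefExpr('bar')]), whose split_owner()
# returns (PathDefExpr([NameDefExpr('Foo')]), NameDefExpr('bar')) -- the
# owning path first, the unqualified name second.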
class TemplateDefExpr(PrimaryDefExpr):
def __init__(self, typename, args):
self.typename = typename
self.args = args
def split_owner(self):
owner, typename = self.typename.split_owner()
return owner, TemplateDefExpr(typename, self.args)
def get_id(self):
return u'%s:%s:' % (self.typename.get_id(),
u'.'.join(x.get_id() for x in self.args))
def __unicode__(self):
return u'%s<%s>' % (self.typename, u', '.join(map(unicode, self.args)))
class WrappingDefExpr(DefExpr):
def __init__(self, typename):
self.typename = typename
def get_name(self):
return self.typename.get_name()
class ModifierDefExpr(WrappingDefExpr):
def __init__(self, typename, modifiers):
WrappingDefExpr.__init__(self, typename)
self.modifiers = modifiers
def get_id(self):
pieces = [_id_shortwords.get(unicode(x), unicode(x))
for x in self.modifiers]
pieces.append(self.typename.get_id())
return u'-'.join(pieces)
def __unicode__(self):
return u' '.join(map(unicode, list(self.modifiers) + [self.typename]))
class PtrDefExpr(WrappingDefExpr):
def get_id(self):
return self.typename.get_id() + u'P'
def __unicode__(self):
return u'%s*' % self.typename
class RefDefExpr(WrappingDefExpr):
def get_id(self):
return self.typename.get_id() + u'R'
def __unicode__(self):
return u'%s&' % self.typename
class ConstDefExpr(WrappingDefExpr):
def __init__(self, typename, prefix=False):
WrappingDefExpr.__init__(self, typename)
self.prefix = prefix
def get_id(self):
return self.typename.get_id() + u'C'
def __unicode__(self):
return (self.prefix and u'const %s' or u'%s const') % self.typename
class CastOpDefExpr(PrimaryDefExpr):
def __init__(self, typename):
self.typename = typename
def get_id(self):
return u'castto-%s-operator' % self.typename.get_id()
def __unicode__(self):
return u'operator %s' % self.typename
class ArgumentDefExpr(DefExpr):
def __init__(self, type, name, default=None):
self.name = name
self.type = type
self.default = default
def get_name(self):
return self.name.get_name()
def get_id(self):
if self.type is None:
return 'X'
return self.type.get_id()
def __unicode__(self):
return (self.type is not None and u'%s %s' % (self.type, self.name)
or unicode(self.name)) + (self.default is not None and
u'=%s' % self.default or u'')
class NamedDefExpr(DefExpr):
def __init__(self, name, visibility, static):
self.name = name
self.visibility = visibility
self.static = static
def get_name(self):
return self.name.get_name()
def get_modifiers(self):
rv = []
if self.visibility != 'public':
rv.append(self.visibility)
if self.static:
rv.append(u'static')
return rv
class TypeObjDefExpr(NamedDefExpr):
def __init__(self, name, visibility, static, typename):
NamedDefExpr.__init__(self, name, visibility, static)
self.typename = typename
def get_id(self):
if self.typename is None:
return self.name.get_id()
return u'%s__%s' % (self.name.get_id(), self.typename.get_id())
def __unicode__(self):
buf = self.get_modifiers()
if self.typename is None:
buf.append(unicode(self.name))
else:
buf.extend(map(unicode, (self.typename, self.name)))
return u' '.join(buf)
class MemberObjDefExpr(NamedDefExpr):
def __init__(self, name, visibility, static, typename, value):
NamedDefExpr.__init__(self, name, visibility, static)
self.typename = typename
self.value = value
def get_id(self):
return u'%s__%s' % (self.name.get_id(), self.typename.get_id())
def __unicode__(self):
buf = self.get_modifiers()
buf.append(u'%s %s' % (self.typename, self.name))
if self.value is not None:
buf.append(u'= %s' % self.value)
return u' '.join(buf)
class FuncDefExpr(NamedDefExpr):
def __init__(self, name, visibility, static, explicit, rv,
signature, const, pure_virtual):
NamedDefExpr.__init__(self, name, visibility, static)
self.rv = rv
self.signature = signature
self.explicit = explicit
self.const = const
self.pure_virtual = pure_virtual
def get_id(self):
return u'%s%s%s' % (
self.name.get_id(),
self.signature and u'__' +
u'.'.join(x.get_id() for x in self.signature) or u'',
self.const and u'C' or u''
)
def __unicode__(self):
buf = self.get_modifiers()
if self.explicit:
buf.append(u'explicit')
if self.rv is not None:
buf.append(unicode(self.rv))
buf.append(u'%s(%s)' % (self.name, u', '.join(
map(unicode, self.signature))))
if self.const:
buf.append(u'const')
if self.pure_virtual:
buf.append(u'= 0')
return u' '.join(buf)
class ClassDefExpr(NamedDefExpr):
def __init__(self, name, visibility, static):
NamedDefExpr.__init__(self, name, visibility, static)
def get_id(self):
return self.name.get_id()
def __unicode__(self):
buf = self.get_modifiers()
buf.append(unicode(self.name))
return u' '.join(buf)
class DefinitionParser(object):
# mapping of valid type modifiers. if the set is None it means
# the modifier can prefix all types, otherwise only the types
# (actually more keywords) in the set. Also check
# _guess_typename when changing this.
_modifiers = {
'volatile': None,
'register': None,
'mutable': None,
'const': None,
'typename': None,
'unsigned': set(('char', 'int', 'long')),
'signed': set(('char', 'int', 'long')),
'short': set(('int', 'short')),
'long': set(('int', 'long', 'double'))
}
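    # Illustrative reading (not in the original): 'unsigned' may only be
    # followed by 'char', 'int' or 'long', while a None entry such as
    # 'volatile' may prefix any type.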
def __init__(self, definition):
self.definition = definition.strip()
self.pos = 0
self.end = len(self.definition)
self.last_match = None
self._previous_state = (0, None)
def fail(self, msg):
raise DefinitionError('Invalid definition: %s [error at %d]\n %s' %
(msg, self.pos, self.definition))
def match(self, regex):
match = regex.match(self.definition, self.pos)
if match is not None:
self._previous_state = (self.pos, self.last_match)
self.pos = match.end()
self.last_match = match
return True
return False
def backout(self):
self.pos, self.last_match = self._previous_state
def skip_string(self, string):
strlen = len(string)
if self.definition[self.pos:self.pos + strlen] == string:
self.pos += strlen
return True
return False
def skip_word(self, word):
return self.match(re.compile(r'\b%s\b' % re.escape(word)))
def skip_ws(self):
return self.match(_whitespace_re)
@property
def eof(self):
return self.pos >= self.end
@property
def current_char(self):
try:
return self.definition[self.pos]
except IndexError:
return 'EOF'
@property
def matched_text(self):
if self.last_match is not None:
return self.last_match.group()
def _parse_operator(self):
self.skip_ws()
# thank god, a regular operator definition
if self.match(_operator_re):
return NameDefExpr('operator' +
_whitespace_re.sub('', self.matched_text))
# new/delete operator?
for allocop in 'new', 'delete':
if not self.skip_word(allocop):
continue
self.skip_ws()
if self.skip_string('['):
self.skip_ws()
if not self.skip_string(']'):
self.fail('expected "]" for ' + allocop)
allocop += '[]'
return NameDefExpr('operator ' + allocop)
# oh well, looks like a cast operator definition.
# In that case, eat another type.
type = self._parse_type()
return CastOpDefExpr(type)
def _parse_name(self):
if not self.match(_identifier_re):
self.fail('expected name')
identifier = self.matched_text
# strictly speaking, operators are not regular identifiers
# but because operator is a keyword, it might not be used
# for variable names anyways, so we can safely parse the
# operator here as identifier
if identifier == 'operator':
return self._parse_operator()
return NameDefExpr(identifier)
def _guess_typename(self, path):
if not path:
return [], 'int'
# for the long type, we don't want the int in there
if 'long' in path:
path = [x for x in path if x != 'int']
# remove one long
path.remove('long')
return path, 'long'
if path[-1] in ('int', 'char'):
return path[:-1], path[-1]
return path, 'int'
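    # e.g. (sketch): ['unsigned', 'long', 'int'] -> (['unsigned'], 'long'),
    # ['signed', 'char'] -> (['signed'], 'char'), and [] -> ([], 'int').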
def _attach_crefptr(self, expr, is_const=False):
if is_const:
expr = ConstDefExpr(expr, prefix=True)
while 1:
self.skip_ws()
if self.skip_word('const'):
expr = ConstDefExpr(expr)
elif self.skip_string('*'):
expr = PtrDefExpr(expr)
elif self.skip_string('&'):
expr = RefDefExpr(expr)
else:
return expr
def _peek_const(self, path):
try:
path.remove('const')
return True
except ValueError:
return False
def _parse_builtin(self, modifier):
path = [modifier]
following = self._modifiers[modifier]
while 1:
self.skip_ws()
if not self.match(_identifier_re):
break
identifier = self.matched_text
if identifier in following:
path.append(identifier)
following = self._modifiers[modifier]
assert following
else:
self.backout()
break
is_const = self._peek_const(path)
modifiers, typename = self._guess_typename(path)
# Olaf: don't use typename (this makes "short int" from "short" etc)
if typename != 'long':
typename = ''
rv = ModifierDefExpr(NameDefExpr(typename), modifiers)
return self._attach_crefptr(rv, is_const)
def _parse_type_expr(self):
typename = self._parse_name()
self.skip_ws()
if not self.skip_string('<'):
return typename
args = []
while 1:
self.skip_ws()
if self.skip_string('>'):
break
if args:
if not self.skip_string(','):
self.fail('"," or ">" in template expected')
self.skip_ws()
args.append(self._parse_type(True))
return TemplateDefExpr(typename, args)
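    # e.g. (sketch): 'Foo<Bar, Baz>' yields TemplateDefExpr(Foo, [Bar, Baz]);
    # a plain 'Foo' is returned as the bare name expression.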
def _parse_type(self, in_template=False):
self.skip_ws()
result = []
modifiers = []
# if there is a leading :: or not, we don't care because we
        # treat them exactly the same. But *if* there is one, we
# don't have to check for type modifiers
if not self.skip_string('::'):
self.skip_ws()
while self.match(_identifier_re):
modifier = self.matched_text
if modifier in self._modifiers:
following = self._modifiers[modifier]
# if the set is not none, there is a limited set
# of types that might follow. It is technically
# impossible for a template to follow, so what
# we do is go to a different function that just
# eats types
if following is not None:
return self._parse_builtin(modifier)
modifiers.append(modifier)
else:
self.backout()
break
while 1:
self.skip_ws()
if (in_template and self.current_char in ',>') or \
(result and not self.skip_string('::')) or \
self.eof:
break
result.append(self._parse_type_expr())
if not result:
self.fail('expected type')
if len(result) == 1:
rv = result[0]
else:
rv = PathDefExpr(result)
is_const = self._peek_const(modifiers)
if modifiers:
rv = ModifierDefExpr(rv, modifiers)
return self._attach_crefptr(rv, is_const)
def _parse_default_expr(self):
self.skip_ws()
if self.match(_string_re):
return self.matched_text
idx1 = self.definition.find(',', self.pos)
idx2 = self.definition.find(')', self.pos)
if idx1 < 0:
idx = idx2
elif idx2 < 0:
idx = idx1
else:
idx = min(idx1, idx2)
if idx < 0:
self.fail('unexpected end in default expression')
rv = self.definition[self.pos:idx]
self.pos = idx
return rv
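    # e.g. (sketch): with the cursor just past '=', ' 5, int y)' yields '5';
    # the scan stops at the first ',' or ')' and does not handle nesting.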
def _parse_signature(self):
self.skip_ws()
if not self.skip_string('('):
self.fail('expected parentheses for function')
args = []
while 1:
self.skip_ws()
if self.eof:
self.fail('missing closing parentheses')
if self.skip_string(')'):
break
if args:
if not self.skip_string(','):
self.fail('expected comma between arguments')
self.skip_ws()
argname = self._parse_type()
argtype = default = None
self.skip_ws()
            if self.skip_string('='):
                default = self._parse_default_expr()
elif self.current_char not in ',)':
argtype = argname
argname = self._parse_name()
self.skip_ws()
if self.skip_string('='):
default = self._parse_default_expr()
args.append(ArgumentDefExpr(argtype, argname, default))
self.skip_ws()
const = self.skip_word('const')
if const:
self.skip_ws()
if self.skip_string('='):
self.skip_ws()
if not (self.skip_string('0') or \
self.skip_word('NULL') or \
self.skip_word('nullptr')):
self.fail('pure virtual functions must be defined with '
'either 0, NULL or nullptr, other macros are '
'not allowed')
pure_virtual = True
else:
pure_virtual = False
return args, const, pure_virtual
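    # e.g. (sketch): '(int a, int b) const = 0' yields two ArgumentDefExpr
    # entries, with const=True and pure_virtual=True.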
def _parse_visibility_static(self):
visibility = ''
if self.match(_visibility_re):
visibility = self.matched_text
static = self.skip_word('static')
return visibility, static
def parse_type(self):
return self._parse_type()
def parse_type_object(self):
visibility, static = self._parse_visibility_static()
typename = self._parse_type()
self.skip_ws()
if not self.eof:
name = self._parse_type()
else:
name = typename
typename = None
return TypeObjDefExpr(name, visibility, static, typename)
def parse_member_object(self):
visibility, static = self._parse_visibility_static()
typename = self._parse_type()
name = self._parse_type()
self.skip_ws()
if self.skip_string('='):
value = self.read_rest().strip()
else:
value = None
return MemberObjDefExpr(name, visibility, static, typename, value)
def parse_function(self):
visibility, static = self._parse_visibility_static()
if self.skip_word('explicit'):
explicit = True
self.skip_ws()
else:
explicit = False
rv = self._parse_type()
self.skip_ws()
# some things just don't have return values
if self.current_char == '(':
name = rv
rv = None
else:
name = self._parse_type()
return FuncDefExpr(name, visibility, static, explicit, rv,
*self._parse_signature())
def parse_class(self):
visibility, static = self._parse_visibility_static()
return ClassDefExpr(self._parse_type(), visibility, static)
def read_rest(self):
rv = self.definition[self.pos:]
self.pos = self.end
return rv
def assert_end(self):
self.skip_ws()
if not self.eof:
self.fail('expected end of definition, got %r' %
self.definition[self.pos:])
class PHPObject(ObjectDescription):
"""Description of a PHP language object."""
def attach_name(self, node, name):
owner, name = name.split_owner()
varname = unicode(name)
if owner is not None:
owner = unicode(owner) + '::'
node += addnodes.desc_addname(owner, owner)
node += addnodes.desc_name(varname, varname)
def attach_type(self, node, type):
# XXX: link to c?
text = unicode(type)
pnode = addnodes.pending_xref(
'', refdomain='php', reftype='type',
reftarget=text, modname=None, classname=None)
pnode['php:parent'] = self.env.temp_data.get('php:parent')
pnode += nodes.Text(text)
node += pnode
def attach_modifiers(self, node, obj):
if obj.visibility != 'public':
node += addnodes.desc_annotation(obj.visibility,
obj.visibility)
node += nodes.Text(' ')
if obj.static:
node += addnodes.desc_annotation('static', 'static')
node += nodes.Text(' ')
def add_target_and_index(self, sigobj, sig, signode):
theid = sigobj.get_id()
name = unicode(sigobj.name)
signode['names'].append(theid)
signode['ids'].append(theid)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
self.env.domaindata['php']['objects'].setdefault(name,
(self.env.docname, self.objtype, theid))
indextext = self.get_index_text(name)
if indextext:
self.indexnode['entries'].append(fixup_index_entry(('single', indextext, name, name, 'foobar')))
def before_content(self):
lastname = self.names and self.names[-1]
if lastname and not self.env.temp_data.get('php:parent'):
assert isinstance(lastname, NamedDefExpr)
self.env.temp_data['php:parent'] = lastname.name
self.parentname_set = True
else:
self.parentname_set = False
def after_content(self):
if self.parentname_set:
self.env.temp_data['php:parent'] = None
def parse_definition(self, parser):
raise NotImplementedError()
def describe_signature(self, signode, arg):
raise NotImplementedError()
def handle_signature(self, sig, signode):
parser = DefinitionParser(sig)
try:
rv = self.parse_definition(parser)
parser.assert_end()
except DefinitionError, e:
self.env.warn(self.env.docname,
e.description, self.lineno)
raise ValueError
self.describe_signature(signode, rv)
parent = self.env.temp_data.get('php:parent')
if parent is not None:
rv = rv.clone()
rv.name = rv.name.prefix(parent)
return rv
class PHPClassObject(PHPObject):
def get_index_text(self, name):
return _('%s (PHP class)') % name
def parse_definition(self, parser):
return parser.parse_class()
def describe_signature(self, signode, cls):
self.attach_modifiers(signode, cls)
signode += addnodes.desc_annotation('class ', 'class ')
self.attach_name(signode, cls.name)
class PHPTypeObject(PHPObject):
def get_index_text(self, name):
if self.objtype == 'type':
return _('%s (PHP type)') % name
return ''
def parse_definition(self, parser):
return parser.parse_type_object()
def describe_signature(self, signode, obj):
self.attach_modifiers(signode, obj)
signode += addnodes.desc_annotation('type ', 'type ')
if obj.typename is not None:
self.attach_type(signode, obj.typename)
signode += nodes.Text(' ')
self.attach_name(signode, obj.name)
class PHPMemberObject(PHPObject):
def get_index_text(self, name):
if self.objtype == 'member':
return _('%s (PHP member)') % name
return ''
def parse_definition(self, parser):
return parser.parse_member_object()
def describe_signature(self, signode, obj):
self.attach_modifiers(signode, obj)
self.attach_type(signode, obj.typename)
signode += nodes.Text(' ')
self.attach_name(signode, obj.name)
if obj.value is not None:
signode += nodes.Text(u' = ' + obj.value)
class PHPFunctionObject(PHPObject):
def attach_function(self, node, func):
owner, name = func.name.split_owner()
if owner is not None:
owner = unicode(owner) + '::'
node += addnodes.desc_addname(owner, owner)
# cast operator is special. in this case the return value
# is reversed.
if isinstance(name, CastOpDefExpr):
node += addnodes.desc_name('operator', 'operator')
node += nodes.Text(u' ')
self.attach_type(node, name.typename)
else:
funcname = unicode(name)
node += addnodes.desc_name(funcname, funcname)
paramlist = addnodes.desc_parameterlist()
for arg in func.signature:
param = addnodes.desc_parameter('', '', noemph=True)
if arg.type is not None:
self.attach_type(param, arg.type)
param += nodes.Text(u' ')
param += nodes.emphasis(unicode(arg.name), unicode(arg.name))
if arg.default is not None:
def_ = u'=' + unicode(arg.default)
param += nodes.emphasis(def_, def_)
paramlist += param
node += paramlist
if func.const:
node += addnodes.desc_addname(' const', ' const')
if func.pure_virtual:
node += addnodes.desc_addname(' = 0', ' = 0')
def get_index_text(self, name):
return _('%s (PHP function)') % name
def parse_definition(self, parser):
return parser.parse_function()
def describe_signature(self, signode, func):
self.attach_modifiers(signode, func)
if func.explicit:
signode += addnodes.desc_annotation('explicit', 'explicit')
signode += nodes.Text(' ')
# return value is None for things with a reverse return value
# such as casting operator definitions or constructors
# and destructors.
if func.rv is not None:
self.attach_type(signode, func.rv)
signode += nodes.Text(u' ')
self.attach_function(signode, func)
class PHPCurrentNamespace(Directive):
"""This directive is just to tell Sphinx that we're documenting
stuff in namespace foo.
"""
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
def run(self):
env = self.state.document.settings.env
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
env.temp_data['php:prefix'] = None
else:
parser = DefinitionParser(self.arguments[0])
try:
prefix = parser.parse_type()
parser.assert_end()
except DefinitionError, e:
self.env.warn(self.env.docname,
e.description, self.lineno)
else:
env.temp_data['php:prefix'] = prefix
return []
class PHPXRefRole(XRefRole):
def process_link(self, env, refnode, has_explicit_title, title, target):
refnode['php:parent'] = env.temp_data.get('php:parent')
if not has_explicit_title:
target = target.lstrip('~') # only has a meaning for the title
# if the first character is a tilde, don't display the module/class
# parts of the contents
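            # e.g. (sketch): a title written as '~Foo::bar' is displayed as
            # just 'bar', while the stripped target 'Foo::bar' is still used.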
if title[:1] == '~':
title = title[1:]
dcolon = title.rfind('::')
if dcolon != -1:
title = title[dcolon + 2:]
return title, target
class PHPDomain(Domain):
"""PHP language domain."""
name = 'php'
label = 'PHP'
object_types = {
'class': ObjType(l_('class'), 'class'),
'function': ObjType(l_('function'), 'func'),
'member': ObjType(l_('member'), 'member'),
'type': ObjType(l_('type'), 'type')
}
directives = {
'class': PHPClassObject,
'function': PHPFunctionObject,
'member': PHPMemberObject,
'type': PHPTypeObject,
'namespace': PHPCurrentNamespace
}
roles = {
'class': PHPXRefRole(),
'func' : PHPXRefRole(fix_parens=True),
'member': PHPXRefRole(),
'type': PHPXRefRole()
}
initial_data = {
'objects': {}, # fullname -> docname, objtype
}
def clear_doc(self, docname):
for fullname, (fn, _, _) in self.data['objects'].items():
if fn == docname:
del self.data['objects'][fullname]
def resolve_xref(self, env, fromdocname, builder,
typ, target, node, contnode):
def _create_refnode(expr):
name = unicode(expr)
if name not in self.data['objects']:
return None
obj = self.data['objects'][name]
if obj[1] not in self.objtypes_for_role(typ):
return None
return make_refnode(builder, fromdocname, obj[0], obj[2],
contnode, name)
parser = DefinitionParser(target)
# XXX: warn?
try:
expr = parser.parse_type().get_name()
parser.skip_ws()
if not parser.eof or expr is None:
return None
except DefinitionError:
return None
parent = node['php:parent']
rv = _create_refnode(expr)
if rv is not None or parent is None:
return rv
parent = parent.get_name()
rv = _create_refnode(expr.prefix(parent))
if rv is not None:
return rv
parent, name = parent.split_owner()
return _create_refnode(expr.prefix(parent))
def get_objects(self):
for refname, (docname, type, theid) in self.data['objects'].iteritems():
yield (refname, refname, type, docname, refname, 1)
def setup(app):
app.add_domain(PHPDomain)
| [
"[email protected]"
] | |
15fc22e8fd23bf75543afca8ce167e6017251fa0 | fb1e852da0a026fb59c8cb24aeb40e62005501f1 | /decoding/GAD/fairseq/dataclass/constants.py | 93bc6d03cb81618c47a58009dc22f7953a106eb3 | [
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.1-or-later",
"LicenseRef-scancode-free-unknown",
"Apache-2.0",
"MIT"
] | permissive | microsoft/unilm | 134aa44867c5ed36222220d3f4fd9616d02db573 | b60c741f746877293bb85eed6806736fc8fa0ffd | refs/heads/master | 2023-08-31T04:09:05.779071 | 2023-08-29T14:07:57 | 2023-08-29T14:07:57 | 198,350,484 | 15,313 | 2,192 | MIT | 2023-08-19T11:33:20 | 2019-07-23T04:15:28 | Python | UTF-8 | Python | false | false | 1,626 | py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from enum import Enum, EnumMeta
from typing import List
class StrEnumMeta(EnumMeta):
# this is workaround for submitit pickling leading to instance checks failing in hydra for StrEnum, see
# https://github.com/facebookresearch/hydra/issues/1156
@classmethod
def __instancecheck__(cls, other):
return "enum" in str(type(other))
class StrEnum(Enum, metaclass=StrEnumMeta):
def __str__(self):
return self.value
def __eq__(self, other: str):
return self.value == other
def __repr__(self):
return self.value
def __hash__(self):
return hash(str(self))
def ChoiceEnum(choices: List[str]):
"""return the Enum class used to enforce list of choices"""
return StrEnum("Choices", {k: k for k in choices})
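# Usage sketch: each constant below is an Enum whose members equal their own
# names, e.g. LOG_FORMAT_CHOICES.json == "json" holds via StrEnum.__eq__.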
LOG_FORMAT_CHOICES = ChoiceEnum(["json", "none", "simple", "tqdm"])
DDP_BACKEND_CHOICES = ChoiceEnum([
"c10d", # alias for pytorch_ddp
"legacy_ddp",
"no_c10d", # alias for legacy_ddp
"pytorch_ddp",
"slow_mo",
])
DATASET_IMPL_CHOICES = ChoiceEnum(["raw", "lazy", "cached", "mmap", "fasta"])
GENERATION_CONSTRAINTS_CHOICES = ChoiceEnum(["ordered", "unordered"])
GENERATION_DECODING_FORMAT_CHOICES = ChoiceEnum(
["unigram", "ensemble", "vote", "dp", "bs"]
)
ZERO_SHARDING_CHOICES = ChoiceEnum(["none", "os"])
PIPELINE_CHECKPOINT_CHOICES = ChoiceEnum(["always", "never", "except_last"])
PRINT_ALIGNMENT_CHOICES = ChoiceEnum(["hard", "soft"])
| [
"[email protected]"
] | |
226980fdf20772f3a2d26e3b993584790ded886b | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/299/100769/submittedfiles/testes.py | b90b88a3a2fbbabb9a6af0cc8e965ec6c94201cb | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,871 | py | from minha_bib import verificar_vitoria
from minha_bib import sorteio
from minha_bib import sorteio2
from minha_bib import maquinainteligente
import time
c=0
tabuleiro=[[1,2,3],[1,2,3],[1,2,3]]
for i in range(0,3,1):
for j in range(0,3,1):
tabuleiro[i][j]=" "
print('---------------------------------------')
print('JOGO DA VELHA')
print('Olá\nSeja Bem Vindo ao jogo da velha!')
# GAME BETWEEN TWO PEOPLE (second player input is commented out below)
nome1=str(input('Qual seu nome(ou apelido)? '))
'''nome2=str(input('Qual o nome do segundo jogador? '))'''
s1=str(input('Qual símbolo você deseja utilizar,'+nome1+'?[X/O]'))
if s1=='X':
s2='O'
'''print('Ok, vamos começar,'+nome2+' ficará com "O"')'''
else:
s2='X'
'''print('Ok, vamos começar,'+nome2+'ficará com "X"')'''
print('Esse é o nosso tabuleiro \n',tabuleiro[0][0],'|',tabuleiro[0][1],'|',tabuleiro[0][2],'\n',tabuleiro[1][0],'|',tabuleiro[1][1],'|',tabuleiro[1][2],'\n',tabuleiro[2][0],'|',tabuleiro[2][1],'|',tabuleiro[2][2])
print('Você vai me informar a casa que quer jogar com números.\n E cada um desses números representa as seguintes casas:')
print('00 | 01 | 02\n10 | 11 | 12\n20 | 21 | 22')
print('E aí eu vou lá e substituo a casa pelo seu símbolo, por exemplo:\nO você me informa a seguinte jogada: 22')
print('Eu vou lá e...')
print('',tabuleiro[0][0],'|',tabuleiro[0][1],'|',tabuleiro[0][2],'\n',tabuleiro[1][0],'|',tabuleiro[1][1],'|',tabuleiro[1][2],'\n',tabuleiro[2][0],'|',tabuleiro[2][1],'|',s2)
print('----------------------------------------------')
# START OF THE GAME
inicio=sorteio(0,1)
if inicio==0:
inicio=str('Usuário')
else:
inicio=str('Máquina')
print('O vencedor do sorteio para início foi '+inicio)
if inicio=='Usuário':
print('Então você começa')
k=0
    while k<9:  # at most 9 moves fit on the board; k<10 allowed a 10th move that loops forever
k+=1
if k%2!=0:
jogada=str(input('Qual a sua jogada '+nome1+'?'))
i=jogada[0]
j=jogada[1]
i=int(i)
j=int(j)
while tabuleiro[i][j]!=" ":
print('Jogada inválida')
jogada=str(input('Qual a sua jogada?'))
i=jogada[0]
j=jogada[1]
i=int(i)
j=int(j)
tabuleiro[i][j]=s1
print('',tabuleiro[0][0],'|',tabuleiro[0][1],'|',tabuleiro[0][2],'\n',tabuleiro[1][0],'|',tabuleiro[1][1],'|',tabuleiro[1][2],'\n',tabuleiro[2][0],'|',tabuleiro[2][1],'|',tabuleiro[2][2])
if verificar_vitoria(tabuleiro)==True:
print('PARABÉNS,VOCÊ VENCEU')
break
elif k%2==0:
print('Minha vez')
time.sleep(1)
x=str(maquinainteligente(tabuleiro))
i=int(x[0])
j=int(x[1])
while tabuleiro[i][j]!=' ':
i=int(sorteio2(0,2))
j=int(sorteio2(0,2))
tabuleiro[i][j]=s2
print('',tabuleiro[0][0],'|',tabuleiro[0][1],'|',tabuleiro[0][2],'\n',tabuleiro[1][0],'|',tabuleiro[1][1],'|',tabuleiro[1][2],'\n',tabuleiro[2][0],'|',tabuleiro[2][1],'|',tabuleiro[2][2])
if verificar_vitoria(tabuleiro)==True:
                print('Ahh, não foi dessa vez')
                break
    # draw: board full with no winner (mirrors the check in the machine-first branch)
    if verificar_vitoria(tabuleiro)==False:
        print('ihhhh, Deu velha')
elif inicio=='Máquina':
print('Então eu começo')
for k in range(1,10,1):
if k%2!=0:
print('Minha vez')
time.sleep(1)
x=str(maquinainteligente(tabuleiro))
i=int(x[0])
j=int(x[1])
while tabuleiro[i][j]!=' ':
i=int(sorteio2(0,2))
j=int(sorteio2(0,2))
tabuleiro[i][j]=s2
print('',tabuleiro[0][0],'|',tabuleiro[0][1],'|',tabuleiro[0][2],'\n',tabuleiro[1][0],'|',tabuleiro[1][1],'|',tabuleiro[1][2],'\n',tabuleiro[2][0],'|',tabuleiro[2][1],'|',tabuleiro[2][2])
if verificar_vitoria(tabuleiro)==True:
print('Ahh, não foi dessa vez')
break
elif k%2==0:
jogada=str(input('Qual a sua jogada '+nome1+'?'))
i=jogada[0]
j=jogada[1]
i=int(i)
j=int(j)
while tabuleiro[i][j]!=" ":
print('Jogada inválida')
jogada=str(input('Qual a sua jogada?'))
i=jogada[0]
j=jogada[1]
i=int(i)
j=int(j)
tabuleiro[i][j]=s1
print('',tabuleiro[0][0],'|',tabuleiro[0][1],'|',tabuleiro[0][2],'\n',tabuleiro[1][0],'|',tabuleiro[1][1],'|',tabuleiro[1][2],'\n',tabuleiro[2][0],'|',tabuleiro[2][1],'|',tabuleiro[2][2])
if verificar_vitoria(tabuleiro)==True:
print('PARABÉNS,VOCÊ VENCEU')
break
    # draw check moved after the loop (the old inline check could never trigger)
    if verificar_vitoria(tabuleiro)==False:
        print('ihhhh, Deu velha')
| [
"[email protected]"
] | |
891cf68c8f2e5a2d7b2c3c9baf3fd45f36ba1c93 | 3e3a835ee885eb9a71fd35ea58acd04361f72f47 | /python基础/复习.py/石头剪刀布.py | df86dfa2ef1429a31cb3268c524f245a54ab4e82 | [] | no_license | hanfang302/py- | dbb259f24e06fbe1a900df53ae6867acb8cb54ea | dd3be494ccef5100c0f06ed936f9a540d8ca0995 | refs/heads/master | 2020-03-16T01:59:57.002135 | 2018-05-07T12:02:21 | 2018-05-07T12:02:21 | 132,454,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | player = int(input('请出拳 石头(1),剪刀(2),布(3):'))
computer = 2
if ((player == 1 and computer == 2) or
(player == 2 and computer == 3) or
(player == 3 and computer == 1)):
print('电脑输了')
elif player == computer:
print('心有灵犀,再来一局')
else:
print('不行,我要和你决战到底')
| [
"[email protected]"
] | |
5b86d1ba8124f7ae022306cd7979e8aa97754314 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /HdrqkdT4r9DeKPjCM_15.py | b8f9cf3d649052ff9b6b798b8d9e233d02626467 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py |
def is_polygonal(n):
if n==1:
return "0th of all"
if n <= 3:
return False
    results = []  # renamed from 'list' to avoid shadowing the builtin
for k in range(3, n):
i=1
current=k+1
while current < n:
i+=1
current += k*i
if current == n:
i = str(i)
i += "th" if i[-2:-1]=="1" else {"1":"st","2":"nd","3":"rd"}.get(i[-1],"th")
            results.append("{ith} {k}-gonal number".format(ith=i,k=k))
    return results
| [
"[email protected]"
] | |
1b8532d3421a9dd5536b1e0debfc39c16e37a6c3 | 1bccf7d57c7aa8d48b84fff187de4b6ff2599cb6 | /pandora_common/state_manager/scripts/state_manager/__init__.py | 6d30fa3d4c6b665f9f74250df0145ce48aae504d | [] | no_license | skohlbr/pandora_ros_pkgs | 733ed34edb5b6d46e59df4acb01288f28ef3b50f | eecaf082b47e52582c5f009eefbf46dd692aba4f | refs/heads/indigo-devel | 2021-01-21T18:06:14.967943 | 2015-11-04T15:08:03 | 2015-11-04T15:08:03 | 53,413,573 | 0 | 1 | null | 2016-03-08T13:19:40 | 2016-03-08T13:19:40 | null | UTF-8 | Python | false | false | 37 | py | from state_client import StateClient
| [
"[email protected]"
] | |
c3abe5035eada595291caa229e664159b4743cb2 | e9ef3cd143478660d098668a10e67544a42b5878 | /Lib/corpuscrawler/crawl_thk.py | f49f58ce0e90e3a983f847f9a2de5a9de94840a2 | [
"Apache-2.0"
] | permissive | google/corpuscrawler | a5c790c19b26e6397b768ce26cf12bbcb641eb90 | 10adaecf4ed5a7d0557c8e692c186023746eb001 | refs/heads/master | 2023-08-26T04:15:59.036883 | 2022-04-20T08:18:11 | 2022-04-20T08:18:11 | 102,909,145 | 119 | 40 | NOASSERTION | 2022-04-20T08:18:12 | 2017-09-08T22:21:03 | Python | UTF-8 | Python | false | false | 809 | py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_function, unicode_literals
from corpuscrawler.util import crawl_bibleis
def crawl(crawler):
out = crawler.get_output(language='thk')
crawl_bibleis(crawler, out, bible='THKBTL')
| [
"[email protected]"
] | |
dd8266083726914414d608f3cacd125395994324 | 7ef29543c9e8305f181084cede03d8cec50508f1 | /docker_vnc_immutable/immutableworkstation3.py | 61d3ee59bb7a0be59a34ef246f878368298cc05d | [
"MIT"
] | permissive | mikadosoftware/workstation | 6168ba7f8f8357d73e7792a3c65c0daec37222e7 | 9c8b19bc5d6c596843da30f58f1dad6a60c7e989 | refs/heads/master | 2023-02-21T03:45:54.209770 | 2023-02-08T08:41:36 | 2023-02-08T08:41:36 | 138,070,951 | 477 | 29 | MIT | 2023-02-07T21:53:32 | 2018-06-20T18:28:07 | Python | UTF-8 | Python | false | false | 13,917 | py | #!/usr/bin/python3
#! -*- coding:utf-8 -*-
"""
ImmutableWorkstation
====================
This is a single entry point for the `immutableworkstation` project.
The project is pretty simple - I want to have a consistent, immutable
workstation on any host machine I am developing on - so I am using a
docker instance on a host machine - the instance is my development
"machine", and it can be rebuilt from consistent templates - this
script helps control all that - its supposed to be easier to get
started than a bunch of poorly documneted shell scripts.
* the start and stopping of the dev instance.
* the compilation of the docker image
* vsarious config and templates used to build to docker image.
This script does quite a lot, and needs to be installed on
the host machine - do so using
pip3 install docopt
python3 setup.py install
(I will launch it on PyPI soon)
Once this is done, you should be able to run
./immutableworkstation.py
usage
-----
We expect to have a config .ini file. This is for ease of specifying things like
volume mappings.
By default the config file is at `~/immuntableworkstation/config.ini`
[ ] Implement expect-style testing so we can automate testing.
[x] put the home dir into git seperate to rest of pacakge (ie thats the indivudal part)
[ ] put blog.mikadosoftware onto AWS and run this testing with docker on it.
[ ] migrate rest of the articles there.
[x] create a plain docker instance and just import devstation, see if it works (ie clean install)
[ ] run the get github projects into one place
[ ] podman system prune : clean up a lot of cruft in docker areas.
[x] remove priviledged access with auser name remapping
[ ] improve using https://github.com/mviereck/x11docker
"""
##### imports #####
import logging, sys
from docopt import docopt
import subprocess
import time
import os
from pprint import pprint as pp
from mikado.core import config
import shutil
import json
import lib_config
import operator
##### Module setup #####
# TODO: split out logging into common module
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
log.addHandler(handler)
DRYRUN = False
PDB = False
OCI_CMD = 'sudo docker'
OCI_CMD = 'podman'
#: usage definitions
DOCOPT_HELP = """immutableworkstation
Usage:
immutableworkstation.py showconfig [options]
immutableworkstation.py createDockerfile --templatedir=<path> [options]
immutableworkstation.py start tagname [options]
immutableworkstation.py stop tagname [options]
immutableworkstation.py login tagname [options]
immutableworkstation.py buildAnyDocker <path_to_dockerfile> <context_dir> [options]
immutableworkstation.py status
immutableworkstation.py test
immutableworkstation.py (-h | --help )
Options:
-h --help Show this screen
-d --dryrun dryrun
--configfile=<configpath> path 2 config ini file
--tagname=<tagname> Name to tag
--instancename=<instancename>
--username=<username>
--volumearray=<volumearray>
"""
def parse_docopt(argsd):
'''We want to split into args (<val>), options (--left) and commands (foo.py fire) '''
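    # e.g. (sketch): for 'immutableworkstation.py start tagname --dryrun',
    # docopt yields {'start': True, 'tagname': True, '--dryrun': True, ...}
    # and the truthy command words end up in active_commands.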
args = []
options = []
commands = []
# we assume only one command at a time?
for k,i in argsd.items():
if k.startswith("--"):
options.append({k:i})
elif k.startswith("<"):
args.append({k:i})
else:
commands.append({k:i})
#
active_commands = [list(d.keys())[0] for d in commands if list(d.values())[0]]
return args, options, commands, active_commands
############### Config
def build_sshcmd():
"""Create the command used to connect to running docker via ssh."""
return "ssh -X {username}@{localhost} -p {ssh_port}".format(**CONFD)
def build_dockerrun(latest=True):
"""create the command used to start docker instance.
tagname of image
name of running instance
"""
_latest = LATEST if latest else NEXT
instance_name = "run_{}_{}".format(CONFD["instance_name"], _latest)
image_name = "{}:{}".format(CONFD["tagname"], _latest)
vols = ""
for hostpath, mountpath in CONFD["volumes"].items():
vols += "-v {}:{} ".format(hostpath, mountpath)
return [
"{} container prune -f".format(OCI_CMD),
"""{OCI_CMD} run -d \
{vols} \
--name {instance_name} \
--device /dev/snd \
-p {ssh_port}:22 \
--privileged \
{tagname}:{_latest}
""".format(
OCI_CMD=OCI_CMD,
vols=vols,
instance_name=instance_name,
ssh_port=CONFD["ssh_port"],
_latest=_latest,
tagname=CONFD["tagname"],
),
]
def build_docker_build(latest=True):
"""Create command used to (re)build the container.
We store the Dockerfile (as that name)
in dir .next or .latest so that we can
have various templates and assets and so on
in the 'context' directory.
"""
tmpl = "{} build -t {{tagname}}:{{tagtag}} {{pathtodockerfile}} --squash".format(OCI_CMD)
_latest = LATEST if latest else NEXT
pathtodockerfile = os.path.join(CONFD["devstation_config_root"], "." + _latest)
return tmpl.format(
tagname=CONFD["tagname"], tagtag=_latest, pathtodockerfile=pathtodockerfile
)
def build_docker_any_build(path_to_dockerfile, context_dir):
"""Create command used to (re)build the container.
"""
tmpl = "{} build -t {{tagname}}:{{tagtag}} -f {{path_to_dockerfile}} {{context_dir}} --squash".format(OCI_CMD)
return tmpl.format(
tagname='anybuild', tagtag='0.1', path_to_dockerfile=path_to_dockerfile, context_dir=context_dir
)
def read_subprocess(cmd):
"""Run a command and return output """
result = subprocess.run(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
shell=True)
txt = result.stdout
return txt
def run_subprocess(cmd, shell=None):
"""Run the given command in a subprocess."""
if DRYRUN:
telluser(cmd)
else:
log.info(cmd)
subprocess.run(cmd, shell=True)
def spawn_sibling_console():
"""This script is best thought of as a launcher for other shells we
shall be working in. We want to interact with the console, not
this script much.
I have played with fork'ing a child console, then passing `fd`
0,1,2 over to it. But the easiest way seems to be to assume this
is a GUI workstation, and people are using a terminal program
(like Konsole) - so we just spawn konsole and run -e
"""
sshcmd = '{} "{}" &'.format(CONFD["terminal_command"], build_sshcmd())
log.info(sshcmd)
run_subprocess(sshcmd)
def handle_start(args):
"""Perform cmsd needed to start the docker and login
I really need to monitor the success of the underlying
cmds, instead of brute force sleep.
[ ] {milestone} stop using sleep, monitor the subprocess for return values.
"""
# do start up here
cmds = build_dockerrun(args["latest"])
for cmd in cmds:
# TODO get better solution than sleep
run_subprocess(cmd, shell=True)
time.sleep(8) # brute force give docker time to complete its stuff.
time.sleep(10) # As above, but let docker catch up before login
handle_login(args)
############### Config
# This is a 'well-known' location
CONFIGDIR = os.path.join(os.path.expanduser("~"), ".immutableworkstation")
CONFIGLOCATION = os.path.join(
os.path.expanduser("~"), ".immutableworkstation/config.ini"
)
def handle_showconfig(args):
print(args['--configfile'])
#lib_config.show_config(confd=CONFD)
def handle_login(args):
spawn_sibling_console()
def handle_createDockerfile(args):
makeDocker(args['--templatedir'])
def handle_buildDocker(args):
"""Trigger the processes to create new dockerfile and then build image. """
makeDocker(latest=args["latest"])
cmd = build_docker_build(latest=args["latest"])
run_subprocess(cmd)
def parse_volumearray(args):
    '''Convert the volume array into usable host=container mappings.
    (Currently returns a hardcoded list; the argument is unused.)
    '''
x = ['~/data=/var/data',
'~/projects=/var/projects',
'~/secrets=/var/secrets:ro',
'~/Dropbox=/var/Dropbox']
return x
def handle_buildAnyDocker(args):
"""Trigger the processes to create new dockerfile and then build image. """
cmd = build_docker_any_build(args['<path_to_dockerfile>'], args['<context_dir>'])
run_subprocess(cmd)
def handle_status(args):
"""Show container status. """
cmd = "{} container ls".format(OCI_CMD)
run_subprocess(cmd)
cmd = "{} inspect run_devstation_next".format(OCI_CMD)
txt = read_subprocess(cmd)
jsond = json.loads(txt)
ipaddress = jsond[0]['NetworkSettings']['IPAddress']
print('Use this ip address {}'.format(ipaddress))
def handle_stop(args):
"""Kill the specified instance. """
_latest = LATEST if args["latest"] else NEXT
#: rewrite so this is not in two places
instance_name = "run_{}_{}".format(CONFD["instance_name"], _latest)
cmd = "{} container kill {}".format(OCI_CMD, instance_name)
run_subprocess(cmd)
def hasValidConfig():
"""This is a placeholder for future development on checking curr env. """
has_config_file = os.path.isfile(CONFIGLOCATION)
return all([has_config_file])
def gatherinfo():
questions = {
"username": "What username should be the default (only) on your immutable workstation?"
}
answers = {}
for label, question in questions.items():
answer = input(question)
answers[label] = answer
return answers
def handle_quickstart(args):
"""We have a starter config on github. Pull that down and put in
users homedir, then alter based on questions.
I am spending too long yak shaving on this app, and so will just
print instructions and look to automate it later.
"""
helpmsg = ""
if hasValidConfig():
helpmsg += """You appear to have an existing config in {}.
Please adjust it manually - view docs for help.""".format(
CONFIGLOCATION
)
if not hasValidConfig():
helpmsg += """ In the future this app will walk you through a series of
questions, but for now please can you download and unzip into {} the
starter config stored at {}. You should have a directory layout like::
.immutableworkstation
|
-config.ini
|
-.next/
-.latest/
You should copy these into *your* github repo, and then update the
templates to your needs, as you find a new package to be added to your
workstation, adjust the config needed.
""".format(
CONFIGDIR, STARTER_CONFIG_URL
)
telluser(helpmsg)
def handle_unknown(command, e, args):
telluser(f"Unknown request. We got command: {command} and error: {e}. Full args were {args}")
def makeDocker(templatesdir):
"""Take a .skeleton file, and replace defined markup with
contents of txt files
Based on 'dockerfile.skeleton', replace any instance of
    {{ python }} with the contents of file `templates/python.template`
This is an *extremely* simple templating tool. It is *not*
supposed to have the complexity even of Jinja2. Its supposed to
be really dumb. Lucky I wrote it then :-).
"""
pathtodockerfile = os.path.join(templatesdir, "../Dockerfile")
skeleton = "dockerfile.skeleton"
outputs = ""
with open(os.path.join(templatesdir, skeleton)) as fo:
for line in fo:
if line.find("{{") == 0:
file = line.replace("{{", "").replace("}}", "").strip()
filepath = os.path.join(templatesdir, file + ".template")
txt = open(filepath).read()
outputs += "\n### {}\n{}\n".format(line, txt)
else:
outputs += "{}".format(line)
fo = open(pathtodockerfile, "w")
fo.write(outputs)
fo.close()
telluser("Written new Dockerfile at {}".format(pathtodockerfile))
def telluser(msg):
""" aggregate print stmts into one place."""
# handle my weird formatting
print(msg)
def build_current_confd(args, options, commands, active_commands):
print("args", args, '----\n')
print("options", options, '----\n')
print("commands", commands, '----\n')
print("active commands", active_commands, '----\n')
volumes = parse_volumearray(options)
import sys; sys.exit()
def run(argsd):
#: start with quickstart as it may be our only options
#: [ ] make this safer with .get
args, options, commands, active_commands = parse_docopt(argsd)
build_current_confd(args, options, commands, active_commands)
for active_command in active_commands:
try:
# in current module, prepend handle_ to the name of the active command and
# look for that in current module, if it exists, call it
current_module = sys.modules[__name__]
fn = operator.attrgetter('handle_{}'.format(active_command))(current_module)
fn.__call__(argsd)
except Exception as e:
handle_unknown(active_command, e, argsd)
def runtests():
import doctest
doctest.testmod()
teststr = '''
[default]
tagname = workstation
instance_name = devstation
localhost = 127.0.0.1
username = pbrian
ssh_port = 2222
terminal_command = /usr/bin/konsole -e
volume_array: ~/secrets=/var/secrets:ro ~/secrets2=/var/secrets2:ro
'''
def main():
global DRYRUN
args = docopt(DOCOPT_HELP)
if args.get("--dryrun", False):
DRYRUN = True
run(args)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
a2a3823e6435408a754b473b37f7233309d5ef3f | 4754d6b05b7eb255983f58474164d8690f4d8684 | /figurines/tests/test_views.py | 4ad1ab56cb491358a3a1c8c3bb9812ce62ef1085 | [] | no_license | pythonmentor/benjamin-p13 | 4f629be3cd9b2e8af6934fb69dfca63d6a294346 | ada744761d3a3c6ecde1aec5db20770960cb2146 | refs/heads/master | 2023-01-24T17:10:30.235330 | 2020-11-30T17:29:09 | 2020-11-30T17:29:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,155 | py | from django.test import TestCase
from django.urls import reverse
from figurines.models import Category, DidYouSee, Figurine
from users.models import User
class FigurineTestViews(TestCase):
def setUp(self):
self.user_test = User.objects.create_user(
username="UserTest", password="PaswordOfTheTest&120"
)
category_figurine = Category.objects.create(
name="super heroes"
)
figurine = Figurine.objects.create(
figurine_number="1",
category=category_figurine,
name="batman"
)
figurine.user.add(self.user_test)
return super().setUp()
def test_figurine_add_figurine(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
response = self.client.post(
"/figurines/add_figurine/",
{"figurine_number": "31", "category": "World of Warcraft", "name": "Thrall"},
)
self.assertEqual(response.status_code, 302)
self.assertTemplateUsed('figurines/collection.html')
def test_figurine_collection_user(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
response = self.client.get('/figurines/collection/')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('figurines/collection.html')
def test_figurine_search_with_all_figurines(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
user = User.objects.get(username="UserTest")
response = self.client.get('/figurines/search/?all=all')
user_figurine = user.figurine_set.all()
self.assertQuerysetEqual(
response.context['figurines_list'],
[repr(figurine) for figurine in user_figurine]
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('figurines/search.html')
def test_figurine_search_without_all_figurines(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
user = User.objects.get(username="UserTest")
        user.figurine_set.all().delete()
response = self.client.get('/figurines/search/?all=all')
self.assertFalse(response.context['figurines_list'])
self.assertContains(response, 'Pas de résultat.')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('figurines/search.html')
def test_figurine_search_with_figurines(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
user = User.objects.get(username="UserTest")
response = self.client.get('/figurines/search/?q=batman')
user_figurine = user.figurine_set.filter(name__icontains='batman')
self.assertQuerysetEqual(
response.context['figurines_list'],
[repr(figurine) for figurine in user_figurine]
)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('figurines/search.html')
    def test_figurine_search_without_matching_figurines(self):  # was a duplicate of the name above, silently shadowing that test
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
user = User.objects.get(username="UserTest")
        user.figurine_set.filter(name__icontains='batman').delete()
response = self.client.get('/figurines/search/?q=batman')
self.assertFalse(response.context['figurines_list'])
self.assertContains(response, 'Pas de résultat.')
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('figurines/search.html')
def test_figurine_did_you_see(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
response = self.client.get("/figurines/did_you_see/")
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed("figurines/did_you_see.html")
def test_create_question(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
response = self.client.post(
"/figurines/create_question",
{
"title": "Je recherche batman",
"text": "Bonjour, je recherche Batman",
"date": "03/07/2020",
},
)
self.assertRedirects(response, '/figurines/did_you_see/')
response = self.client.get('/figurines/did_you_see/')
self.assertContains(response, 'Je recherche batman')
self.assertTemplateUsed('figurines/did_you_see.html')
def test_can_respond_to_question(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
response = self.client.post(
"/figurines/create_question",
{
"title": "Je recherche batman2",
"text": "Bonjour, je recherche Batman2",
"date": "03/07/2020",
},
)
post = DidYouSee.objects.get(title='Je recherche batman2')
response_second_message = self.client.post(
f"/figurines/create_question/{post.id}",
{
"title": "J'ai batman2",
"text": "j'ai batman",
"date": "20/07/2020",
}
)
response_detail = self.client.get(f'/figurines/post_detail/{post.id}/')
self.assertContains(response_detail, "j'ai batman")
self.assertTemplateUsed('figurines/post_detail.html')
def test_post_detail(self):
self.client.force_login(self.user_test)
user = User.objects.get(username="UserTest")
post = DidYouSee(
author=user,
title="Je recherche batman",
text="Bonjour, j'ai trouvé Batman",
)
post.save()
post.parent = post
post.save()
response = self.client.get(
f"/figurines/post_detail/{post.id}"
)
self.assertContains(response, "Je recherche batman")
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('figurines/post_detail.html')
"""
def test_delete_figurine(self):
self.client.login(username="UserTest", password="PaswordOfTheTest&120")
response = self.client.post('/figurines/collection/?q=logan')
user = User.objects.get(username="UserTest")
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed('figurines/collection.html')
"""
# def test_report_post(self):
# self.client.login(username="UserTest", password="PaswordOfTheTest&120")
# response = self.client.post(
# "/figurines/post_detail/51/",
# {
# "title": "Je recherche batman",
# "text": "Bonjour, j'ai trouvé Batman",
# },
# )
# self.assertEqual(response.status_code, 200)
# self.assertTemplateUsed('figurines/report_post.html')
| [
"[email protected]"
] | |
6ad1ec33ed60cb67164cba8e6c216bf23b7eff14 | 09592939eaf88d46f7d2d760d9587cb9fc22707e | /entity/cards/LETLT_083/LETLT_083.py | c575c2ef97600aa10d16c30ba708043ebfac001e | [
"MIT"
] | permissive | fulln/lushi_script | 5deb2fb99956988ee4884836443f74277b361939 | f2c5250f6ce7e3ea2b8d3ba280d999ae8c7beb8b | refs/heads/main | 2023-09-04T16:50:24.696142 | 2021-11-24T03:44:41 | 2021-11-24T03:44:41 | 431,565,901 | 0 | 0 | MIT | 2021-11-24T17:04:06 | 2021-11-24T17:04:05 | null | UTF-8 | Python | false | false | 470 | py | # -*- coding: utf-8 -*-
from hearthstone.entities import Entity
from entity.spell_entity import SpellEntity
class LETLT_083(SpellEntity):
"""
剧烈爆发
对本回合中已经行动过的敌人造成10点伤害。在下一场战斗开始时,重复此伤害。
"""
def __init__(self, entity: Entity):
super().__init__(entity)
self.damage = 0
self.range = 0
def play(self, game, hero, target):
pass
| [
"[email protected]"
] | |
92d3f6d6dc1e477f6b89f1665b180bf5ab4360da | 968913bda3879ef316100410cdb2b01333ac14a8 | /004_Algorithm_Implementations_In_Python/data_structures/queue/queue_on_list.py | 898ffac3a9c7c1fda92bb8b75af1826ee7ec17f0 | [
"MIT"
] | permissive | sm2774us/2021_Interview_Prep | 02b6a81ee52f3cb14d9e060839a01aadd84e231f | c6689411a4334d53c88581a296e57c314b50f46c | refs/heads/main | 2023-03-02T05:30:17.156821 | 2021-01-26T04:31:02 | 2021-01-26T04:31:02 | 332,603,676 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,213 | py | """Queue represented by a python list"""
class Queue():
def __init__(self):
self.entries = []
self.length = 0
self.front=0
def __str__(self):
printed = '<' + str(self.entries)[1:-1] + '>'
return printed
"""Enqueues {@code item}
@param item
item to enqueue"""
def put(self, item):
self.entries.append(item)
self.length = self.length + 1
"""Dequeues {@code item}
@requirement: |self.length| > 0
@return dequeued
item that was dequeued"""
def get(self):
self.length = self.length - 1
dequeued = self.entries[self.front]
#self.front-=1
#self.entries = self.entries[self.front:]
self.entries = self.entries[1:]
return dequeued
"""Rotates the queue {@code rotation} times
@param rotation
number of times to rotate queue"""
def rotate(self, rotation):
for i in range(rotation):
self.put(self.get())
"""Enqueues {@code item}
@return item at front of self.entries"""
def front(self):
return self.entries[0]
"""Returns the length of this.entries"""
def size(self):
return self.length
| [
"[email protected]"
] | |
09b392b45aef0ce2b082eaa210be15285a463e0c | 45015c94a4376a4af66e4134f0552288cd15a2d8 | /services/authentication_service.py | ee9f1e65813dcf31637b0a0974cb9c00e4c7b390 | [] | no_license | Anubhav722/trello | 971111af8cbc1f6c344ace200e2741e809e9a1fa | 600b5410cde7fd2a51720fa4ca7cc2ecfbff322e | refs/heads/master | 2023-07-13T18:24:51.937539 | 2021-08-21T13:22:17 | 2021-08-21T13:22:17 | 398,563,384 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py |
class AuthenticationService:
def __init__(self, ttl):
self.tokens = {} # Map<token_id, user_obj>
def renew_token(self, token_id):
pass
def authenticate_request(self, token_id, timestamp):
pass
def register_user(self, ):
pass
| [
"[email protected]"
] | |
538b671955b4ac1fa9cf8fb82d290212541efada | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /djDJHv3nwWsRM9mtu_15.py | d8d00d9f36be6af88a931dc7bc4cd7cb6aa76d74 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 113 | py |
def validate_spelling(txt):
return "".join(txt.split(". ")[:-1]).lower() == txt.split(". ")[-1][:-1].lower()
| [
"[email protected]"
] | |
b3afdc5ed5a2cd8de578e1fd31eb490f17a5db95 | 2455062787d67535da8be051ac5e361a097cf66f | /Producers/BSUB/TrigProd_amumu_a5_dR5/trigger_amumu_producer_cfg_TrigProd_amumu_a5_dR5_499.py | 14a070c95d6dc5d7822dce37415383786cbf8e82 | [] | no_license | kmtos/BBA-RecoLevel | 6e153c08d5ef579a42800f6c11995ee55eb54846 | 367adaa745fbdb43e875e5ce837c613d288738ab | refs/heads/master | 2021-01-10T08:33:45.509687 | 2015-12-04T09:20:14 | 2015-12-04T09:20:14 | 43,355,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,360 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("PAT")
#process.load("BBA/Analyzer/bbaanalyzer_cfi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.EventContent.EventContent_cff')
process.load("Configuration.Geometry.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff")
process.load("PhysicsTools.PatAlgos.selectionLayer1.selectedPatCandidates_cff")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')
process.load("Configuration.StandardSequences.MagneticField_cff")
####################
# Message Logger
####################
process.MessageLogger.cerr.FwkReport.reportEvery = cms.untracked.int32(100)
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
## switch to uncheduled mode
process.options.allowUnscheduled = cms.untracked.bool(True)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(500)
)
####################
# Input File List
####################
# Input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('root://eoscms//eos/cms/store/user/ktos/RECO_Step3_amumu_a5/RECO_Step3_amumu_a5_499.root'),
secondaryFileNames = cms.untracked.vstring()
)
############################################################
# Defining matching in DeltaR, sorting by best DeltaR
############################################################
process.mOniaTrigMatch = cms.EDProducer("PATTriggerMatcherDRLessByR",
src = cms.InputTag( 'slimmedMuons' ),
matched = cms.InputTag( 'patTrigger' ), # selections of trigger objects
matchedCuts = cms.string( 'type( "TriggerMuon" ) && path( "HLT_Mu16_TkMu0_dEta18_Onia*")' ), # input does not yet have the 'saveTags' parameter in HLT
maxDPtRel = cms.double( 0.5 ), # no effect here
maxDeltaR = cms.double( 0.3 ), #### selection of matches
maxDeltaEta = cms.double( 0.2 ), # no effect here
resolveAmbiguities = cms.bool( True ),# definition of matcher output
resolveByMatchQuality = cms.bool( True )# definition of matcher output
)
# talk to output module
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string("file:RECO_Step3_amumu_a5_TrigProd_499.root"),
outputCommands = process.MINIAODSIMEventContent.outputCommands
)
process.out.outputCommands += [ 'drop *_*_*_*',
'keep *_*slimmed*_*_*',
'keep *_pfTausEI_*_*',
'keep *_hpsPFTauProducer_*_*',
'keep *_hltTriggerSummaryAOD_*_*',
'keep *_TriggerResults_*_HLT',
'keep *_patTrigger*_*_*',
'keep *_prunedGenParticles_*_*',
'keep *_mOniaTrigMatch_*_*'
]
################################################################################
# Running the matching and setting the the trigger on
################################################################################
from PhysicsTools.PatAlgos.tools.trigTools import *
switchOnTrigger( process ) # This is optional and can be omitted.
switchOnTriggerMatching( process, triggerMatchers = [ 'mOniaTrigMatch'
])
process.outpath = cms.EndPath(process.out)
| [
"[email protected]"
] | |
52cf3aac7e139b3a4d760b80cc223a9bd88e323d | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03496/s023860422.py | 3418e271fe6d39c5afd0834fa668eb6252fedf15 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | n = int(input())
a = list(map(int,input().split()))
mi = a[0]
mii = 1
ma = a[0]
mai = 1
for i in range(n):
if a[i] > ma:
ma = a[i]
mai = i+1
if a[i] < mi:
mi = a[i]
mii = i+1
if mi >= 0:
print(n-1)
for i in range(1,n):
print(i,i+1)
elif ma <= 0:
print(n-1)
for i in range(n,1,-1):
print(i,i-1)
elif abs(ma) >= abs(mi):
print(n*2-1)
for i in range(n):
print(mai,i+1)
for i in range(1,n):
print(i,i+1)
else:
print(n*2-1)
for i in range(n):
print(mii,i+1)
for i in range(n,1,-1):
print(i,i-1) | [
"[email protected]"
] | |
f86f1440c1dfce4772c26f8bd9d40aeb6c368956 | 27a066c48096e30e3cf4a795edf6e8387f63728b | /mysite/django_vises/runtimes/misc.py | dbb4cc342ce1012cbf1a9397f2dea0e09cf202d4 | [] | no_license | 26huitailang/django-tutorial | 2712317c3f7514743e90fb4135e5fe3fed5def90 | 28a0b04ee3b9ca7e2d6e84e522047c63b0d19c8f | refs/heads/master | 2023-01-07T11:55:37.003245 | 2019-09-04T09:19:50 | 2019-09-04T09:19:50 | 113,199,279 | 1 | 0 | null | 2023-01-03T15:24:01 | 2017-12-05T15:27:52 | Python | UTF-8 | Python | false | false | 2,885 | py | #!/usr/bin/env python
# coding=utf-8
# import glob
import os
import operator
from django.utils.six import text_type
# copy from rest_framework
# Header encoding (see RFC5987)
HTTP_HEADER_ENCODING = 'iso-8859-1'
def get_request_client_ip_address(request):
"""获取 request 请求来源 ip address, 支持 nginx 使用 X-Real-IP/X-FORWARDED-FOR 传递来源 ip 地址
"""
ip = request.META.get('X-Real-IP') or request.META.get('HTTP_X_FORWARDED_FOR')
if ip:
ip = ip.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
def get_authorization_header(request):
"""
Return request's 'Authorization:' header, as a bytestring.
Hide some test client ickyness where the header can be unicode.
"""
auth = request.META.get('HTTP_AUTHORIZATION', b'')
if isinstance(auth, text_type):
# Work around django test client oddness
auth = auth.encode(HTTP_HEADER_ENCODING)
return auth
def get_authorization_token_from_header(request):
"""
Return request's 'Authorization:' token
"""
keyword = 'Token'
auth = get_authorization_header(request).split()
if not auth or auth[0].lower() != keyword.lower().encode():
return None
# if len(auth) == 1:
# msg = _('Invalid token header. No credentials provided.')
# raise exceptions.AuthenticationFailed(msg)
# elif len(auth) > 2:
# msg = _('Invalid token header. Token string should not contain spaces.')
# raise exceptions.AuthenticationFailed(msg)
#
# try:
# token = auth[1].decode()
# except UnicodeError:
# msg = _('Invalid token header. Token string should not contain invalid characters.')
# raise exceptions.AuthenticationFailed(msg)
if len(auth) != 2:
return None
try:
token = auth[1].decode()
except UnicodeError:
return None
return token
def str_to_boolean(text):
"""将字符转为布尔值,if条件可以扩展"""
if text.lower() in ['false']:
return False
elif text.lower() in ['true']:
return True
def sort_dict_list(dict_to_sort: dict = None, sort_key='', reverse=False) -> list:
sorted_list = sorted(dict_to_sort, key=operator.itemgetter(sort_key), reverse=reverse)
return sorted_list
def get_local_suite_img_list(suite_path: str = None, format='jpg') -> list:
"""获取本地suite的图片列表"""
if suite_path is None:
return []
    # glob cannot match the more complex paths here
    # img_file_list = glob.glob('{}/*.{}'.format(suite_path, format))
files_list = os.listdir(suite_path)
img_file_list = list(filter(lambda x: x.endswith(format), files_list))
return img_file_list
def get_local_suite_count(suite_path: str = None) -> int:
"""本地suite图片数量"""
return len(get_local_suite_img_list(suite_path))
| [
"[email protected]"
] | |
a4b2331bc60e49067ff8516c4b13766c7a4c9c5e | e60a342f322273d3db5f4ab66f0e1ffffe39de29 | /parts/zodiac/pyramid/tests/test_config/pkgs/scannable/another.py | 2022b704558f0f407eb359cc3d36dfdfe3a9041b | [] | no_license | Xoting/GAExotZodiac | 6b1b1f5356a4a4732da4c122db0f60b3f08ff6c1 | f60b2b77b47f6181752a98399f6724b1cb47ddaf | refs/heads/master | 2021-01-15T21:45:20.494358 | 2014-01-13T15:29:22 | 2014-01-13T15:29:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | /home/alex/myenv/zodiac/eggs/pyramid-1.4-py2.7.egg/pyramid/tests/test_config/pkgs/scannable/another.py | [
"[email protected]"
] | |
109682c64aad2d044c9cc951d3a773c4106965bc | 6d39e9031c9ab28c094edc042559dc649308528e | /backend/manage.py | d84a5dce9a344fbcab5dc3e5650245daac85af08 | [] | no_license | crowdbotics-apps/test-31900 | ead3f9341645ce981aa07dad2756548ffb84c5d7 | d5b54e57dfc2839f1a9f7237deb4df7c3899e30d | refs/heads/master | 2023-08-29T16:35:59.385571 | 2021-11-10T00:12:56 | 2021-11-10T00:12:56 | 426,427,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_31900.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
9c7d677d074b5d250abc200c103cff8fb806b269 | df94f543424f47f87bd6d546cca23d1c5a7b024c | /easy/easy922.py | f8409367478a0930ddc49d9bb4bc49ab8b62ce17 | [] | no_license | wangpeibao/leetcode-python | c13cb63304e91dcd55ffacee541d9197cafd01ff | 392a272a799decdd77c2410a89787ea8e1aa76d3 | refs/heads/master | 2023-01-31T05:09:34.850459 | 2020-12-04T03:25:21 | 2020-12-04T03:25:21 | 257,457,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,263 | py | '''
922. Sort Array By Parity II
Given a non-negative integer array A, half of the integers in A are odd and half are even.
Sort the array so that whenever A[i] is odd, i is odd, and whenever A[i] is even, i is even.
You may return any array that satisfies the condition.
Example:
Input: [4,2,5,7]
Output: [4,5,2,7]
Explanation: [4,7,2,5], [2,5,4,7], and [2,7,4,5] would also be accepted.
Constraints:
2 <= A.length <= 20000
A.length % 2 == 0
0 <= A[i] <= 1000
'''
from typing import List
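# Approach: in-place two-pointer repair. `start` walks forward until it finds a
# value whose parity disagrees with its index; the inner scan then moves `end`
# over the opposite-parity indices, swapping misplaced values back to `start`.
# Even-index and odd-index mismatches always occur in equal numbers, so one pass
# fixes the whole array: O(n) time, O(1) extra space.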
class Solution:
def sortArrayByParityII(self, A: List[int]) -> List[int]:
        # two pointers
start = 0
length = len(A)
while start < length:
if (start % 2 == 0 and A[start] % 2 == 0) or (start % 2 == 1 and A[start] % 2 == 1):
start += 1
continue
            # scan ahead for the next misplaced element to pair with
end = start + 1
while end < length:
if (end % 2 == 0 and A[end] % 2 == 0) or (end % 2 == 1 and A[end] % 2 == 1):
end += 2
else:
A[start], A[end] = A[end], A[start]
start = start + 2
return A
so = Solution()
print(so.sortArrayByParityII([4,2,5,7]) == [4,5,2,7]) | [
"[email protected]"
] | |
cbc684ff9ae4dd85231ece8eaed2a8851b6264ba | deaf60a5ba012e68f8509c0df0d35a5228419b71 | /找商网/zhao_shang_wang_changxin/zhao_shang_wang/spiders/spider_data.py | a34737cdfa962283d95ea12c2c4ffaafadfb4f46 | [] | no_license | kokohui/con_spider | 7162d8e58725d9334db5f1da34649cd1d1ef29ea | da1181b53e5cbca546d1bb749f9efc2f48e698f8 | refs/heads/master | 2022-03-03T19:37:33.721533 | 2019-08-22T10:05:32 | 2019-08-22T10:05:32 | 193,631,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,365 | py | # -*- coding: utf-8 -*-
from ..items import ZhaoShangWangItem
import scrapy
from scrapy import Request
from bs4 import BeautifulSoup
import os
import random
import requests
import pymysql
import time
import re
import jieba.analyse
conn = pymysql.connect(host='192.168.1.210', user='root', passwd='zhangxing888', db='ktcx_buschance', port=3306,
charset='utf8')
cur = conn.cursor()  # get a cursor
class SpiderDataSpider(scrapy.Spider):
name = 'spider_data'
# start_urls = ['https://www.zhaosw.com/product/search/1541291/2']
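    # Crawl flow (summary inferred from the callbacks below):
    #   start_requests -> category search pages for one url taken from bus_spider_data
    #   parse          -> follows the first product on the listing page
    #   parse_2        -> follows that seller's product-list tab
    #   parse_3        -> follows every product detail page
    #   parse_detail   -> scrapes the fields, mirrors the images, yields a ZhaoShangWangItem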
def start_requests(self):
sql_id = "SELECT url FROM bus_spider_data WHERE source = '找商网' and TYPE = 'gongying' AND is_del = '0' AND isuse = '0' ORDER BY create_date LIMIT 1 "
cur.execute(sql_id)
res_all_list = cur.fetchall()
url = res_all_list[0][0]
for num in range(1, 2):
url_2 = 'https://www.zhaosw.com/product/search/{}/{}'.format(url, num)
print(url_2)
yield Request(url=url_2, callback=self.parse)
def parse(self, response):
detail_url = response.xpath('//*[@id="productForm"]/div[@class="m-product-list"]/a/@href')[0].extract()
yield Request(url=detail_url, callback=self.parse_2)
def parse_2(self, response):
res_url = response.xpath('/html/body/header/div/div[4]/div/div/div/ul/li[2]/a/@href')[0].extract()
yield Request(url=res_url, callback=self.parse_3)
def parse_3(self, response):
pro_url_list = response.xpath('//*[@id="productForm"]/div[3]/div/a/@href').extract()
for pro_url in pro_url_list:
yield Request(url=pro_url, callback=self.parse_detail)
def parse_detail(self, response):
item = ZhaoShangWangItem()
mobile = ''
result_count = 0
try:
mobile = response.xpath('//p[@class="p3"]/span[@class="span2"]/text()')[0].extract().strip()
com_name = str(response.xpath('//p[@class="p-title"]/a/text()').extract()[0]).strip()
sql_count = "select count(0) from bus_user where company_name='{}'".format(com_name)
cur.execute(sql_count)
result = cur.fetchall()
result_count = int(result[0][0])
except:
            print('no mobile number found, or the company already exists')
if mobile != '' and result_count == 0:
print('................................................')
            # fetch the category ids from the database
sql_id = "SELECT one_level,two_level,three_level,keyword FROM bus_spider_data WHERE source = '找商网' and TYPE = 'gongying' AND is_del = '0' AND isuse = '0' ORDER BY create_date LIMIT 1 "
cur.execute(sql_id)
print('sql_id?????????????', sql_id)
res_all_list = cur.fetchall()
for res_all in res_all_list:
one_level = res_all[0]
item['one_level_id'] = str(one_level)
print('id.........', item['one_level_id'])
two_level = res_all[1]
item['two_level_id'] = str(two_level)
print('id.........', item['two_level_id'])
three_level = res_all[2]
item['three_level_id'] = str(three_level)
print('id.........', item['three_level_id'])
keywords = res_all[-1]
item['keywords'] = str(keywords)
            # save the product images
os_img_2_list = []
try:
str_ran = str(random.randint(0, 999999))
os.makedirs('/home/imgServer/hc/{}'.format(str_ran))
                # download each linked image to disk
res_img = response.xpath('//*[@id="productImage"]/div[2]/ul/li/a/img/@src')
for img_url in res_img:
img_url = img_url.extract()
img_url = 'https:' + img_url.strip()
                    img_url = re.sub(r'\.\.\d+x\d+.jpg', '', img_url)  # raw string: strip the "..<W>x<H>.jpg" thumbnail suffix
print('img_url>>>>>>>>>>>>><<<<<<<<<<<<<<<<<::::::', img_url)
code_img = requests.get(url=img_url).content
img_name = str(random.randint(1, 999999))
with open('/home/imgServer/hc/{}/{}.jpg'.format(str_ran, img_name), 'wb') as f:
f.write(code_img)
os_img_2 = 'http://img.youkeduo.com.cn/hc/' + '{}/{}.jpg'.format(str_ran, img_name)
os_img_2_list.append(os_img_2)
os_img_2_str_1 = os_img_2_list[0]
os_img_2_str = ','.join(os_img_2_list)
item['list_img'] = os_img_2_str_1
item['imgs'] = os_img_2_str
                print('images ok', os_img_2_list)
            except:
                print('image error.')
            # creation time
create_date = time.strftime('%Y.%m.%d %H:%M:%S ', time.localtime(time.time()))
item['create_date'] = create_date
            # price
price = ''
try:
price = str(response.xpath('/html/body/main/div[4]/div[1]/div[2]/div[2]/div[1]/div/span/text()').extract()[0].strip())
if price.startswith('¥'):
price = price[1:]
if not price:
                    price = '面议'  # "面议" means negotiable; stored verbatim as data
print('price', price)
except:
print('price', price)
item['price'] = price
            # title
title = ''
try:
title = str(response.xpath('/html/body/main/div[4]/div[1]/div[2]/div[1]/h4/text()').extract()[0])
print('title', title)
except:
print('title', title)
item['title'] = title
            # way: '0' = fixed price, '1' = negotiable
if price != '':
way = '0'
else:
way = '1'
item['way'] = way
res_detail_html = response.text
try:
soup = BeautifulSoup(res_detail_html, 'lxml')
html_1 = str(soup.find('div', class_="parameter-body"))
html = str(soup.find('div', class_="introduction-body clearfix"))
# print(html)
strinfo = re.compile('<img.*?>')
html_2 = strinfo.sub('', html)
strinfo = re.compile('<br.*?>')
html_3 = strinfo.sub('', html_2)
                strinfo = re.compile('慧聪网')  # swap the source site's brand name (慧聪网) for our own (优客多)
                html_4 = strinfo.sub('优客多', html_3)
                # append the downloaded images to the html
div_list = ['<div id="img_detail">', '</div>']
for os_img_2_url in os_img_2_list:
os_img_2_url = '<img alt="{}" src="{}">'.format(title, os_img_2_url)
div_list.insert(1, os_img_2_url)
div_str = '\n'.join(div_list)
html_all = html_1 + html_4 + '\n' + div_str
# print(html_all)
except Exception as e:
raise e
item['detail'] = str(html_all)
# units
units = ''
try:
units = response.xpath('/html/body/main/div[4]/div[1]/div[2]/div[2]/div[1]/div/text()').extract()[-1]
units = units.strip().replace('/', '').replace('\n', '')
print('units', units)
except:
print('units', units)
item['units'] = units
# com_name
com_name = ''
try:
com_name = str(response.xpath('//p[@class="p-title"]/a/text()').extract()[0]).strip()
print('com_name', com_name)
except:
print('com_name', com_name)
item['com_name'] = com_name
# linkman
linkman = ''
try:
linkman = re.findall('<span.*?>联系人:</span><span.*?>(.*?)</span>', response.text)[0]
print('linkman', linkman)
except:
print('linkman', linkman)
item['linkman'] = linkman
# mobile
mobile = ''
try:
mobile = response.xpath('//p[@class="p3"]/span[@class="span2"]/text()')[0].extract().strip()
print('mobile', mobile)
except:
print('mobile', mobile)
item['mobile'] = mobile
# address
address = ''
try:
address = re.findall('<span.*?>所在地区:</span><span.*?>(.*?)</span>', response.text)[0]
print('address', address)
except:
print('address', address)
item['address'] = address
scopes = '-'
try:
scopes = response.xpath('//div[@class="p7-content"]/span[2]/a/text()').extract()
scopes = str(scopes).strip('[').strip(']').replace("'", "").replace(",", " ")
print('scopes', scopes)
except:
print('scopes', scopes)
item['scopes'] = scopes
summary = ''
try:
summary = response.xpath('//div[@class="p-contain"]/p[@class="p4"]/span[2]/text()')[0].extract()
print('summary>>>>>>>>>>>>>>>', summary)
except:
print('summary', summary)
item['summary'] = summary
yield item
| [
"[email protected]"
] | |
61ee902f9aec9bdeff25f6e72569396187f62aff | 01afa0be1c3acbf562fd87bd8fec8b4101c1e461 | /Mining-Massive-Dataset/week5/advanced_quiz3.py | 8c96a6d7d682c7d9d8f2ec6fe73c3b09bf879b97 | [] | no_license | listiani13/coursera | e4f1116cc619b62336c5bb4d2e714e7051ae775c | 5c84cf7171a440261de639b53558e9767b1cd85e | refs/heads/master | 2021-01-22T03:54:31.657656 | 2016-04-04T11:07:25 | 2016-04-04T11:07:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 424 | py | from math import sqrt
def euclidean(x, y):
return sqrt((x[0] - y[0])**2 + (x[1] - y[1])**2)
points = [(1, 6), (3, 7), (4, 3), (7, 7), (8, 2), (9, 5)]
chosen = [(0, 0), (10, 10)]
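# Greedy farthest-first traversal (the k-center style point selection from the
# clustering quiz): with (0, 0) and (10, 10) as fixed seeds, repeatedly promote
# the remaining point whose distance to its nearest already-chosen point is
# largest; the loop promotes 5 of the 6 candidates.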
for _ in range(5):
pos, mx = -1, -1
for i, p in enumerate(points):
distance = min([euclidean(p, pc) for pc in chosen])
if distance > mx:
mx, pos = distance, i
    print('choose:', points[pos])
chosen.append(points[pos])
del points[pos]
| [
"[email protected]"
] | |
f32b08a5dadf9bf4dbc0b238e4cb160e93b689f5 | 3a01d6f6e9f7db7428ae5dc286d6bc267c4ca13e | /pylith/meshio/OutputMatElastic.py | 75bd619e57bb719fa4f7cc5e470df1ff774171da | [
"MIT"
] | permissive | youngsolar/pylith | 1ee9f03c2b01560706b44b4ccae99c3fb6b9fdf4 | 62c07b91fa7581641c7b2a0f658bde288fa003de | refs/heads/master | 2020-12-26T04:04:21.884785 | 2014-10-06T21:42:42 | 2014-10-06T21:42:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,792 | py | #!/usr/bin/env python
#
# ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University of Chicago
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2014 University of California, Davis
#
# See COPYING for license information.
#
# ----------------------------------------------------------------------
#
## @file pyre/meshio/OutputMatElastic.py
##
## @brief Python object for managing output of finite-element
## information for material state variables.
##
## Factory: output_manager
from OutputManager import OutputManager
# OutputMatElastic class
class OutputMatElastic(OutputManager):
"""
Python object for managing output of finite-element information for
material state variables.
Factory: output_manager
"""
# INVENTORY //////////////////////////////////////////////////////////
class Inventory(OutputManager.Inventory):
"""
Python object for managing OutputMatElastic facilities and properties.
"""
## @class Inventory
## Python object for managing OutputMatElastic facilities and properties.
##
## \b Properties
## @li \b cell_info_fields Names of cell info fields to output.
## @li \b cell_data_fields Names of cell data fields to output.
##
## \b Facilities
## @li None
import pyre.inventory
cellInfoFields = pyre.inventory.list("cell_info_fields",
default=["mu",
"lambda",
"density"])
cellInfoFields.meta['tip'] = "Names of cell info fields to output."
cellDataFields = pyre.inventory.list("cell_data_fields",
default=["total_strain", "stress"])
cellDataFields.meta['tip'] = "Names of cell data fields to output."
# PUBLIC METHODS /////////////////////////////////////////////////////
def __init__(self, name="outputmatelastic"):
"""
Constructor.
"""
OutputManager.__init__(self, name)
return
# PRIVATE METHODS ////////////////////////////////////////////////////
def _configure(self):
"""
Set members based using inventory.
"""
OutputManager._configure(self)
self.vertexInfoFields = []
self.vertexDataFields = []
self.cellInfoFields = self.inventory.cellInfoFields
self.cellDataFields = self.inventory.cellDataFields
return
# FACTORIES ////////////////////////////////////////////////////////////
def output_manager():
"""
Factory associated with OutputManager.
"""
return OutputMatElastic()
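# Usage sketch (hedged): the lists above are Pyre properties, so they can be
# overridden from a .cfg parameter file; the component path below is
# illustrative and depends on how the material is wired into the simulation:
#
#   [pylithapp.timedependent.materials.elastic.output]
#   cell_info_fields = [mu, lambda, density]
#   cell_data_fields = [total_strain, stress]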
# End of file
| [
"[email protected]"
] | |
5bd234d032a1cef724c7d19f94ecdca75497c3b5 | 803bab6f782099d995bcdb99d163486f4fff8c50 | /test/test_pointnav_resnet_policy.py | f58a4a45e857196c0ab6b215a39c3fce54de9832 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"CC-BY-NC-SA-3.0"
] | permissive | facebookresearch/habitat-lab | 7088506509f64da6d682f5dc69427589f71a58a9 | f5b29e62df0788d70ba3618fc738fa4e947428ba | refs/heads/main | 2023-08-24T14:00:02.707343 | 2023-08-23T04:53:48 | 2023-08-23T04:53:48 | 169,164,391 | 792 | 298 | MIT | 2023-09-14T15:20:03 | 2019-02-04T23:12:51 | Python | UTF-8 | Python | false | false | 4,432 | py | #!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os.path
import shlex
import subprocess
import numpy as np
import pytest
import torch
from gym import spaces
from habitat import read_write
from habitat_baselines.config.default import get_config
from habitat_baselines.rl.ddppo.policy import PointNavResNetPolicy
ACTION_SPACE = spaces.Discrete(4)
OBSERVATION_SPACES = {
"depth_model": spaces.Dict(
{
"depth": spaces.Box(
low=0,
high=1,
shape=(256, 256, 1),
dtype=np.float32,
),
"pointgoal_with_gps_compass": spaces.Box(
low=np.finfo(np.float32).min,
high=np.finfo(np.float32).max,
shape=(2,),
dtype=np.float32,
),
}
),
"rgb_model": spaces.Dict(
{
"rgb": spaces.Box(
low=0,
high=255,
shape=(256, 256, 3),
dtype=np.uint8,
),
"pointgoal_with_gps_compass": spaces.Box(
low=np.finfo(np.float32).min,
high=np.finfo(np.float32).max,
shape=(2,),
dtype=np.float32,
),
}
),
"blind_model": spaces.Dict(
{
"pointgoal_with_gps_compass": spaces.Box(
low=np.finfo(np.float32).min,
high=np.finfo(np.float32).max,
shape=(2,),
dtype=np.float32,
),
}
),
}
MODELS_DEST_DIR = "data/ddppo-models"
MODELS_BASE_URL = "https://dl.fbaipublicfiles.com/habitat/data/baselines/v1/ddppo/ddppo-models"
MODELS_TO_TEST = {
"gibson-2plus-resnet50.pth": {
"backbone": "resnet50",
"observation_space": OBSERVATION_SPACES["depth_model"],
"action_space": ACTION_SPACE,
},
"gibson-2plus-mp3d-train-val-test-se-resneXt50-rgb.pth": {
"backbone": "se_resneXt50",
"observation_space": OBSERVATION_SPACES["rgb_model"],
"action_space": ACTION_SPACE,
},
"gibson-0plus-mp3d-train-val-test-blind.pth": {
"backbone": None,
"observation_space": OBSERVATION_SPACES["blind_model"],
"action_space": ACTION_SPACE,
},
}
def _get_model_url(model_name):
return f"{MODELS_BASE_URL}/{model_name}"
def _get_model_path(model_name):
return f"{MODELS_DEST_DIR}/{model_name}"
@pytest.fixture(scope="module", autouse=True)
def download_data():
for model_name in MODELS_TO_TEST:
model_url = _get_model_url(model_name)
model_path = _get_model_path(model_name)
if not os.path.exists(model_path):
print(f"Downloading {model_name}.")
download_command = (
"wget --continue " + model_url + " -P " + MODELS_DEST_DIR
)
subprocess.check_call(shlex.split(download_command))
assert os.path.exists(
model_path
), "Download failed, no package found."
@pytest.mark.parametrize(
"pretrained_weights_path,backbone,observation_space,action_space",
[
(
_get_model_path(model_name),
params["backbone"],
params["observation_space"],
params["action_space"],
)
for model_name, params in MODELS_TO_TEST.items()
],
)
def test_pretrained_models(
pretrained_weights_path, backbone, observation_space, action_space
):
config = get_config(
"test/config/habitat_baselines/ddppo_pointnav_test.yaml"
)
with read_write(config):
ddppo_config = config.habitat_baselines.rl.ddppo
ddppo_config.pretrained = True
ddppo_config.pretrained_weights = pretrained_weights_path
if backbone is not None:
ddppo_config.backbone = backbone
policy = PointNavResNetPolicy.from_config(
config=config,
observation_space=observation_space,
action_space=action_space,
)
pretrained_state = torch.load(pretrained_weights_path, map_location="cpu")
prefix = "actor_critic."
policy.load_state_dict(
{ # type: ignore
k[len(prefix) :]: v
for k, v in pretrained_state["state_dict"].items()
}
)
| [
"[email protected]"
] | |
9eebd51cd8523865c63b5ea9bc13a91b30809bd9 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_w_M_to_Wxyz_focus_Z_ok/wiColorJ/pyr_Tcrop255_pad20_jit15/Sob_k15_s001_EroM_Mae_s001/pyr_5s/L5/step10_a.py | e6c87465892d874e7e738fd489c714ca918ab17a | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140,087 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__)                                   ### path of the step10_b.py currently being executed
code_exe_path_element = code_exe_path.split("\\")                            ### split the path so we can find which level kong_model2 sits at
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2")                      ### find which level kong_model2 sits at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])          ### locate the kong_model2 dir
import sys                                                                   ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer               ### the -1 in the middle converts a length into an index
# print("    kong_to_py_layer:", kong_to_py_layer)
if  (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:]  ### [7:] once stripped the "step1x_" prefix; a meaningful name seemed fine to keep, so it became 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:]  ### [5:] once stripped "mask_", added only because a python module name cannot start with a digit; the automatic ordering turned out acceptable, so it became 0
elif(kong_to_py_layer >  3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print("    template_dir:", template_dir)                                   ### example: template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_5side_L5 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
#############################################################################################################################################################################################################
'''
exp_dir is the name of the folder one level above result_dir; a nested exp_dir works too.
e.g. with exp_dir = "6_mask_unet/a_name_you_pick", every result_dir lives under:
    6_mask_unet/a_name_you_pick/result_a
    6_mask_unet/a_name_you_pick/result_b
    6_mask_unet/a_name_you_pick/...
'''
use_db_obj = type8_blender_kong_doc3d_in_I_gt_W_ch_norm_v2
use_loss_obj = [mae_s001_sobel_k15_s001_EroseM_loss_info_builder.set_loss_target("UNet_z").copy(), mae_s001_sobel_k15_s001_EroseM_loss_info_builder.set_loss_target("UNet_y").copy(), mae_s001_sobel_k15_s001_EroseM_loss_info_builder.set_loss_target("UNet_x").copy()]  ### the z, y, x order follows step07_b_0b_Multi_UNet
#############################################################
### build an empty Exp_builder so resul_analyze can draw blank figures
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
##################################
### 1side1
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_1__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side2
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_2__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_2__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side3
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_3__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_3__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_3__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side4
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_4__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_4__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_4__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_4__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side5
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_5__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_5__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_5__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_5__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_1side_5__2side_5__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
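# A minimal refactoring sketch, not part of the generated grid above: every
# definition in this file repeats the same builder chain and varies only the
# pyramid model object, so a helper like the one below could build any entry.
# The name _build_exp is an assumption for illustration (it is not used
# elsewhere in this codebase); the module-level names it references
# (Exp_builder, use_db_obj, use_loss_obj, exp_dir, code_exe_path, Range)
# are the ones already in scope in this file.
def _build_exp(pyramid_model_builder):
    """Wrap one ch032 pyramid model builder in the standard training chain."""
    return (Exp_builder()
            .set_basic("train", use_db_obj, pyramid_model_builder, use_loss_obj,
                       exp_dir=exp_dir, code_exe_path=code_exe_path,
                       describe_end=pyramid_model_builder.kong_model.model_describe)
            .set_train_args(epochs=1)
            .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2,
                                 it_down_step="half", it_down_fq=900)
            .set_train_in_gt_use_range(use_in_range=Range(0, 1),
                                       use_gt_range=Range(0, 1))
            .set_result_name(result_name=""))
# Example (equivalent to the hand-written definition just above):
#   ch032_1side_5__2side_5__3side_5_4side_5_5s5 = _build_exp(ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5)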
##################################
### 1side6
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_6__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_6__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_6__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_6__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_1side_6__2side_5__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 15 "21" 28 36 45 55
# 2side6 OK 56
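# Count check (added illustration, not part of the original script): the sequence in
# the comment above lists the triangular numbers T(k) = k*(k+1)//2, with "21" marking
# k = 6. The 2side=6 block below enumerates 3side in 1..6, 4side in 1..3side, and
# 5s in 1..4side, so the total is T(1)+...+T(6) = 56, matching the "OK 56" note.
assert sum(k * (k + 1) // 2 for k in range(1, 7)) == 56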
ch032_1side_6__2side_6__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
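# Generation sketch (added illustration, not part of the original script): the 56
# builders above for 2side=6 all follow one template, so an equivalent lookup table
# can be produced with nested loops. The explicit top-level names above are left
# untouched, since they stay greppable and directly usable by the eval() in __main__.
_gen_exps_2side6 = {}
for _s3 in range(1, 7):
    for _s4 in range(1, _s3 + 1):
        for _s5 in range(1, _s4 + 1):
            _name  = f"ch032_1side_6__2side_6__3side_{_s3}_4side_{_s4}_5s{_s5}"
            _model = globals()[f"ch032_pyramid_1side_6__2side_6__3side_{_s3}_4side_{_s4}_5s{_s5}"]
            _gen_exps_2side6[_name] = Exp_builder().set_basic("train", use_db_obj, _model, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=_model.kong_model.model_describe).set_train_args(epochs=1).set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")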
#############################################################
if __name__ == "__main__":
    print("build exps cost time:", time.time() - start_time)
    if len(sys.argv) < 2:
        ############################################################################################################
        ### Press F5 directly, or run python step10_b1_exp_obj_load_and_train_and_test.py with nothing appended after it!
        ### That way it won't fall through to the code below, which is meant for step10_b_subprocess.py~~~
        ch032_1side_1__2side_1__3side_1_4side_1_5s1.build().run()
        # print('no argument')
        sys.exit()
    ### The part below is for step10_b_subprocess.py; it is equivalent to typing
    ### python step10_b1_exp_obj_load_and_train_and_test.py <some_exp>.build().run() in cmd
    eval(sys.argv[1])
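    # Example (added illustration): step10_b_subprocess.py would launch something like
    #     python step10_b1_exp_obj_load_and_train_and_test.py "ch032_1side_6__2side_6__3side_6_4side_6_5s6.build().run()"
    # and the eval() above executes that expression string, building and running just that one exp.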
authors: ["[email protected]"]