| Column | Type | Range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–616 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k–681M, nullable |
| star_events_count | int64 | 0–209k |
| fork_events_count | int64 | 0–110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3–10.2M |
| extension | string | 188 classes |
| content | string | length 3–10.2M |
| authors | sequence | length 1 |
| author_id | string | length 1–132 |
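The rows below are flattened records of this schema, each carrying repository metadata plus the full file text in its `content` field. A minimal sketch of iterating over such rows with the Hugging Face `datasets` library follows; the path "user/dataset-name" is a placeholder, since the actual dataset id is not given in this dump.

```python
from datasets import load_dataset

# Placeholder path; substitute the real dataset id.
ds = load_dataset("user/dataset-name", split="train", streaming=True)

for row in ds:
    print(row["repo_name"], row["path"], row["length_bytes"])
    if row["license_type"] == "permissive" and not row["is_generated"]:
        source = row["content"]  # raw source text, as shown in the rows below
        break
```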
8176f29c210a52c8544016e57564ace030a86875 | 155a25eb18213664da9978030e6743b04d570141 | /manage.py | 825bd789c5cf66c550320c139be766171af95606 | [] | no_license | powellc/timberwyck | 0d6fd6e46c2899f32dda37faa8030a8c7080bc97 | 583cbc2ee33cb56187db13c94d5d4af74f51c9bd | refs/heads/master | 2020-05-18T13:59:20.394609 | 2014-05-03T05:09:18 | 2014-05-03T05:09:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "timberwyck.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Dev")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
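This manage.py delegates to django-configurations rather than stock Django: the DJANGO_CONFIGURATION environment variable names a Configuration subclass to load from timberwyck.settings. A hypothetical settings module satisfying the "Dev" lookup could look like the sketch below; the field values are illustrative, not taken from the repository.

```python
# timberwyck/settings.py (hypothetical sketch)
from configurations import Configuration

class Dev(Configuration):
    DEBUG = True
    SECRET_KEY = "dev-only-placeholder"  # illustrative value
    ALLOWED_HOSTS = ["localhost"]
    INSTALLED_APPS = [
        "django.contrib.contenttypes",
        "django.contrib.auth",
    ]
```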
6d75f464bfc5dab8974a58bc1fb72ee468f050c7 | 551b75f52d28c0b5c8944d808a361470e2602654 | /huaweicloud-sdk-cts/huaweicloudsdkcts/v3/model/update_tracker_request_body.py | 86a0e288fb2da06e6d914be15bba419009029213 | [
"Apache-2.0"
] | permissive | wuchen-huawei/huaweicloud-sdk-python-v3 | 9d6597ce8ab666a9a297b3d936aeb85c55cf5877 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | refs/heads/master | 2023-05-08T21:32:31.920300 | 2021-05-26T08:54:18 | 2021-05-26T08:54:18 | 370,898,764 | 0 | 0 | NOASSERTION | 2021-05-26T03:50:07 | 2021-05-26T03:50:07 | null | UTF-8 | Python | false | false | 11,674 | py | # coding: utf-8
import pprint
import re
import six
class UpdateTrackerRequestBody:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'tracker_type': 'str',
'tracker_name': 'str',
'status': 'str',
'is_lts_enabled': 'bool',
'obs_info': 'TrackerObsInfo',
'is_support_trace_files_encryption': 'bool',
'kms_id': 'str',
'is_support_validate': 'bool',
'data_bucket': 'DataBucket'
}
attribute_map = {
'tracker_type': 'tracker_type',
'tracker_name': 'tracker_name',
'status': 'status',
'is_lts_enabled': 'is_lts_enabled',
'obs_info': 'obs_info',
'is_support_trace_files_encryption': 'is_support_trace_files_encryption',
'kms_id': 'kms_id',
'is_support_validate': 'is_support_validate',
'data_bucket': 'data_bucket'
}
def __init__(self, tracker_type=None, tracker_name=None, status=None, is_lts_enabled=None, obs_info=None, is_support_trace_files_encryption=None, kms_id=None, is_support_validate=None, data_bucket=None):
"""UpdateTrackerRequestBody - a model defined in huaweicloud sdk"""
self._tracker_type = None
self._tracker_name = None
self._status = None
self._is_lts_enabled = None
self._obs_info = None
self._is_support_trace_files_encryption = None
self._kms_id = None
self._is_support_validate = None
self._data_bucket = None
self.discriminator = None
self.tracker_type = tracker_type
self.tracker_name = tracker_name
if status is not None:
self.status = status
if is_lts_enabled is not None:
self.is_lts_enabled = is_lts_enabled
if obs_info is not None:
self.obs_info = obs_info
if is_support_trace_files_encryption is not None:
self.is_support_trace_files_encryption = is_support_trace_files_encryption
if kms_id is not None:
self.kms_id = kms_id
if is_support_validate is not None:
self.is_support_validate = is_support_validate
if data_bucket is not None:
self.data_bucket = data_bucket
@property
def tracker_type(self):
"""Gets the tracker_type of this UpdateTrackerRequestBody.
Identifies the tracker type. Currently supported types are the management tracker (system) and the data tracker (data). Parameters shared by both: is_lts_enabled, obs_info. Management-tracker parameters: is_support_trace_files_encryption, kms_id, is_support_validate. Data-tracker parameters: tracker_name, data_bucket.
:return: The tracker_type of this UpdateTrackerRequestBody.
:rtype: str
"""
return self._tracker_type
@tracker_type.setter
def tracker_type(self, tracker_type):
"""Sets the tracker_type of this UpdateTrackerRequestBody.
Identifies the tracker type. Currently supported types are the management tracker (system) and the data tracker (data). Parameters shared by both: is_lts_enabled, obs_info. Management-tracker parameters: is_support_trace_files_encryption, kms_id, is_support_validate. Data-tracker parameters: tracker_name, data_bucket.
:param tracker_type: The tracker_type of this UpdateTrackerRequestBody.
:type: str
"""
self._tracker_type = tracker_type
@property
def tracker_name(self):
"""Gets the tracker_name of this UpdateTrackerRequestBody.
Identifies the tracker name. When \"tracker_type\" is \"system\", this parameter defaults to \"system\". When \"tracker_type\" is \"data\", a tracker name must be specified.
:return: The tracker_name of this UpdateTrackerRequestBody.
:rtype: str
"""
return self._tracker_name
@tracker_name.setter
def tracker_name(self, tracker_name):
"""Sets the tracker_name of this UpdateTrackerRequestBody.
Identifies the tracker name. When \"tracker_type\" is \"system\", this parameter defaults to \"system\". When \"tracker_type\" is \"data\", a tracker name must be specified.
:param tracker_name: The tracker_name of this UpdateTrackerRequestBody.
:type: str
"""
self._tracker_name = tracker_name
@property
def status(self):
"""Gets the status of this UpdateTrackerRequestBody.
Identifies the tracker status. The statuses that can be set through this API are enabled and disabled. If the status is set to disabled, the tracker stops recording traces once the update succeeds.
:return: The status of this UpdateTrackerRequestBody.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this UpdateTrackerRequestBody.
Identifies the tracker status. The statuses that can be set through this API are enabled and disabled. If the status is set to disabled, the tracker stops recording traces once the update succeeds.
:param status: The status of this UpdateTrackerRequestBody.
:type: str
"""
self._status = status
@property
def is_lts_enabled(self):
"""Gets the is_lts_enabled of this UpdateTrackerRequestBody.
Whether trace analysis is enabled.
:return: The is_lts_enabled of this UpdateTrackerRequestBody.
:rtype: bool
"""
return self._is_lts_enabled
@is_lts_enabled.setter
def is_lts_enabled(self, is_lts_enabled):
"""Sets the is_lts_enabled of this UpdateTrackerRequestBody.
Whether trace analysis is enabled.
:param is_lts_enabled: The is_lts_enabled of this UpdateTrackerRequestBody.
:type: bool
"""
self._is_lts_enabled = is_lts_enabled
@property
def obs_info(self):
"""Gets the obs_info of this UpdateTrackerRequestBody.
:return: The obs_info of this UpdateTrackerRequestBody.
:rtype: TrackerObsInfo
"""
return self._obs_info
@obs_info.setter
def obs_info(self, obs_info):
"""Sets the obs_info of this UpdateTrackerRequestBody.
:param obs_info: The obs_info of this UpdateTrackerRequestBody.
:type: TrackerObsInfo
"""
self._obs_info = obs_info
@property
def is_support_trace_files_encryption(self):
"""Gets the is_support_trace_files_encryption of this UpdateTrackerRequestBody.
Switch for encrypting trace files during transfer. Valid only when \"tracker_type\" is \"system\". Must be used together with the kms_id parameter.
:return: The is_support_trace_files_encryption of this UpdateTrackerRequestBody.
:rtype: bool
"""
return self._is_support_trace_files_encryption
@is_support_trace_files_encryption.setter
def is_support_trace_files_encryption(self, is_support_trace_files_encryption):
"""Sets the is_support_trace_files_encryption of this UpdateTrackerRequestBody.
Switch for encrypting trace files during transfer. Valid only when \"tracker_type\" is \"system\". Must be used together with the kms_id parameter.
:param is_support_trace_files_encryption: The is_support_trace_files_encryption of this UpdateTrackerRequestBody.
:type: bool
"""
self._is_support_trace_files_encryption = is_support_trace_files_encryption
@property
def kms_id(self):
"""Gets the kms_id of this UpdateTrackerRequestBody.
ID of the key (obtained from KMS) used to encrypt transferred trace files. Valid only when \"tracker_type\" is \"system\". Mandatory when \"is_support_trace_files_encryption\" is true.
:return: The kms_id of this UpdateTrackerRequestBody.
:rtype: str
"""
return self._kms_id
@kms_id.setter
def kms_id(self, kms_id):
"""Sets the kms_id of this UpdateTrackerRequestBody.
ID of the key (obtained from KMS) used to encrypt transferred trace files. Valid only when \"tracker_type\" is \"system\". Mandatory when \"is_support_trace_files_encryption\" is true.
:param kms_id: The kms_id of this UpdateTrackerRequestBody.
:type: str
"""
self._kms_id = kms_id
@property
def is_support_validate(self):
"""Gets the is_support_validate of this UpdateTrackerRequestBody.
Whether trace files are verified during transfer. Valid only when \"tracker_type\" is \"system\".
:return: The is_support_validate of this UpdateTrackerRequestBody.
:rtype: bool
"""
return self._is_support_validate
@is_support_validate.setter
def is_support_validate(self, is_support_validate):
"""Sets the is_support_validate of this UpdateTrackerRequestBody.
Whether trace files are verified during transfer. Valid only when \"tracker_type\" is \"system\".
:param is_support_validate: The is_support_validate of this UpdateTrackerRequestBody.
:type: bool
"""
self._is_support_validate = is_support_validate
@property
def data_bucket(self):
"""Gets the data_bucket of this UpdateTrackerRequestBody.
:return: The data_bucket of this UpdateTrackerRequestBody.
:rtype: DataBucket
"""
return self._data_bucket
@data_bucket.setter
def data_bucket(self, data_bucket):
"""Sets the data_bucket of this UpdateTrackerRequestBody.
:param data_bucket: The data_bucket of this UpdateTrackerRequestBody.
:type: DataBucket
"""
self._data_bucket = data_bucket
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateTrackerRequestBody):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
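A quick usage sketch for the model above; the values are illustrative only, and the nested TrackerObsInfo/DataBucket models from the same SDK are omitted.

```python
# Hypothetical usage; field values are placeholders.
body = UpdateTrackerRequestBody(
    tracker_type="system",
    tracker_name="system",
    status="enabled",
    is_support_trace_files_encryption=True,
    kms_id="kms-key-id-placeholder",
)
print(body.to_dict())  # unset optional fields are emitted as None
print(body)            # __repr__ pretty-prints via to_dict()
```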
c5001ecfa2716239bb437211c0ca5878f4942947 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_underscoring.py | b4a890d3243fc3207ae8047c40277eb6f93f3f90 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py |
from xai.brain.wordbase.verbs._underscore import _UNDERSCORE
# class header
class _UNDERSCORING(_UNDERSCORE):
def __init__(self,):
_UNDERSCORE.__init__(self)
self.name = "UNDERSCORING"
self.specie = 'verbs'
self.basic = "underscore"
self.jsondata = {}
| [
"[email protected]"
] | |
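The class only tags a base word entry with its own metadata; assuming the unshown _UNDERSCORE base class needs no constructor arguments beyond what __init__ passes, usage reduces to:

```python
# Hypothetical use; depends on the unshown _UNDERSCORE base class.
word = _UNDERSCORING()
print(word.name, word.specie, word.basic)  # UNDERSCORING verbs underscore
```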
c3d0de22c121cb4e7a40f9ee7dbdefe55148a230 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-8204.py | c8284994a9d91b15a9ac7250620e44db2de5eea1 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,757 | py | # A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
| [
"[email protected]"
] | |
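The driver above filters vrange(2, n) by trial division: each surviving element removes its later multiples, so the loop at the end prints the primes below 50. The same logic in plain Python, stripped of the ChocoPy Vector scaffolding:

```python
# Equivalent of sieve(vrange(2, 50)) without the Vector classes.
def sieve_list(v):
    i = 0
    while i < len(v):
        k = v[i]
        # drop every later element divisible by k
        v[i + 1:] = [x for x in v[i + 1:] if x % k != 0]
        i += 1
    return v

print(sieve_list(list(range(2, 50))))
# [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47]
```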
065a7ba25278105449e7b3d0bc7e9d75e0141fe2 | b564a7d17f224e9512fec36bab4333353381e22c | /lib/exaproxy/html/humans.py | f15fdaa44f84ee1894da006b05dcba9b027d9279 | [
"BSD-2-Clause"
] | permissive | Exa-Networks/exaproxy | 464f9c72449b12d4f3960e9829a0f93fec8db0da | 8b7291b79c1cd6542213a5e7d8dda3cf5a676166 | refs/heads/master | 2023-09-03T16:10:56.656549 | 2022-06-28T16:52:48 | 2022-06-28T16:52:48 | 13,495,150 | 127 | 25 | NOASSERTION | 2022-06-28T16:52:49 | 2013-10-11T09:36:28 | Python | UTF-8 | Python | false | false | 1,384 | py | # encoding: utf-8
"""
humans.py
Created by Thomas Mangin on 2012-02-25.
Copyright (c) 2011-2013 Exa Networks. All rights reserved.
"""
from .images import thomas,david
class humans:
txt = """\
/* TEAM */
Slave Driver / Grand Visionary: Thomas Mangin
Google+: https://plus.google.com/104241996506596749840
Github: https://github.com/thomas-mangin
Engineer Extraordinaire: David Farrar
Google+: https://plus.google.com/108845019528954357090
Github: https://github.com/david-farrar
/* Other contributors */
Marek Obuchowicz (kqueue reactor)
Github: https://github.com/marek-obuchowicz
"""
html = """\
<div style="padding: 20px 20px 20px 20px;">
<b>/* TEAM */</b><br/>
<br/>
<div style="margin-left:20px;margin-right:10px;">
<img width="100px" src="data:image/png;base64,%s"/>
</div>
<br/>
Slave Driver / Grand Visionary<br/>
<a href="https://plus.google.com/104241996506596749840">Thomas Mangin</a><br/>
<br/>
<div style="margin-left:20px;margin-right:10px;">
<img width="100px" src="data:image/png;base64,%s"/>
</div>
<br/>
Engineer Extraordinaire<br/>
<a href="https://plus.google.com/108845019528954357090">David Farrar</a><br/>
</div>
<div style="padding: 20px 20px 20px 20px;">
<b>/* Other contributors */</b>
<br/>
<a href="https://github.com/marek-obuchowicz">Marek Obuchowicz</a> (kqueue reactor)
<br/>
</div>
""" % (thomas,david)
| [
"[email protected]"
] | |
6469709fcf868289b689c5a64db4c625a21116ff | 904b4b7cd6b1709e9aded92737766a3b5a978838 | /bissextile.py | d90e2359ddb92bf8a0938ca97e262464bbf19394 | [] | no_license | NicolasLagaillardie/Python | 3ec7aa6eb21ffa86fad33060bb53e42cb7957dc9 | a30037d688d8f11a195d7fa611347528c313d71b | refs/heads/master | 2020-03-30T13:48:27.038592 | 2018-10-02T16:54:42 | 2018-10-02T16:54:42 | 151,288,608 | 0 | 0 | null | null | null | null | ISO-8859-1 | Python | false | false | 382 | py | # -*- coding: cp1252 -*-
def bissextile(annee):
if annee%4!=0:
print annee,' n\'est pas une année bissextile'
else:
if annee%100==0:
if annee%400==0:
print annee,' est bissextile'
else:
print annee,' n\'est pas une année bissextile'
else:
print annee,' est une année bissextile'
| [
"[email protected]"
] | |
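bissextile implements the standard Gregorian rule: years divisible by 4 are leap years, except century years, unless they are divisible by 400. Since the original is Python 2 with French messages, here is an equivalent Python 3 predicate for reference:

```python
def is_leap(year: int) -> bool:
    # multiples of 4 are leap years, but centuries only when divisible by 400
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

assert is_leap(2000) and is_leap(2024)
assert not is_leap(1900) and not is_leap(2023)
```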
fc9131f9ccde84e2d38716326e9ff70aa33bac2a | f306d169cf3b48061a7b29d297612b025f3825f7 | /yamtbx/util/__init__.py | bda055a257197c3133ec50c6e8855fe242592bab | [
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | robertbuecker/yamtbx | b2b43c09ec27a6aa08c1b9330f731df2f95d82c6 | 9b90e03d27600fd9e550252dcb65c1109f04c44f | refs/heads/master | 2020-06-20T07:03:17.679343 | 2019-07-15T00:17:06 | 2019-07-15T00:17:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,267 | py | """
(c) RIKEN 2015. All rights reserved.
Author: Keitaro Yamashita
This software is released under the new BSD License; see LICENSE.
"""
import os
import sys
import re
import shutil
import subprocess
import commands
import glob
import tempfile
from libtbx.utils import null_out
import libtbx.load_env
def call(cmd, arg="",
stdin=None, stdout=subprocess.PIPE,
wdir=None,
expects_in=[], expects_out=[]):
##
# call the external program using subprocess.
#
# @param expects_in expected files before running
# @param expects_out expected files after running
#
# expected_in/out must be written as relative path from wdir or absolute path.
#
def check_exist(files):
is_exist = [os.path.isfile(f) for f in files]
if sum(is_exist) != len(is_exist):
not_founds = [ f for f, e in zip(files, is_exist) if not e ]
raise Exception("Expected file(s) not found: " + " ".join(not_founds))
# check_exist()
if wdir is None:
wdir = os.getcwd()
# Go to working directory
cwd = os.getcwd()
os.chdir(wdir)
# check before run
check_exist(expects_in)
# call the program
p = subprocess.Popen("%s %s" % (cmd, arg),
shell=True,
stdin=subprocess.PIPE,
stdout=stdout,
stderr=stdout
)
if stdin is not None:
p.stdin.write(stdin)
if stdout == subprocess.PIPE:
out, err = p.communicate()
else:
out, err = None, None
p.stdin.close()
p.wait()
if p.returncode < 0:
print >>sys.stderr, cmd, ": returncode is", p.returncode
# check after run
check_exist(expects_out)
# go back to the previous working directory
os.chdir(cwd)
return p.returncode, out, err
# call()
def rotate_file(filename, copy=False):
"""
Rotate file like logrotate.
If given filename already exists, rename it to "filename".n, n=1...
Filename with larger n is older one.
"""
# If not exist,
if not os.path.isfile(filename):
return
# make list [ [filename, number], ... ]
old_list = []
dot_files = glob.glob(filename + ".*")
for f in dot_files:
suffix = f.replace(filename+".", "")
try:
i = int(suffix)
if str(i) == suffix: # ignore if suffix was such as 003...
old_list.append([f, i])
except ValueError, e:
continue
old_list.sort(lambda x,y: x[1]-y[1])
# rotate files
for f, i in reversed(old_list):
os.rename(f, "%s.%d" % (f[:f.rfind(".")], i+1))
if copy:
shutil.copyfile(filename, filename + ".1")
else:
os.rename(filename, filename + ".1")
return filename + ".1"
# rotate_file()
def safe_copy(src, dst, move=False):
"""
Copy via a temporary file so the destination is not revealed until the copy has completed.
"""
src_name = os.path.basename(src)
if os.path.isdir(dst): dst = os.path.join(dst, src_name)
tmpfd, tmp = tempfile.mkstemp(prefix="."+src_name, dir=os.path.dirname(dst))
os.close(tmpfd)
shutil.copy2(src, tmp)
os.rename(tmp, dst)
if move and os.path.isfile(dst) and not os.path.islink(dst) and os.path.getsize(src)==os.path.getsize(dst):
os.remove(src)
# safe_copy()
def commonalize(Is):
new_Is = []
Is0 = Is[0]
for I in Is[1:]:
Is0, I = Is0.common_sets(I, assert_is_similar_symmetry=False)
new_Is.append(I)
Is = []
for I in new_Is:
I = I.common_set(Is0, assert_is_similar_symmetry=False)
assert len(Is0.data()) == len(I.data())
Is.append(I)
return [Is0,] + Is
# commonalize()
def get_number_of_processors(default=4):
nproc = default
if os.path.isfile("/proc/cpuinfo"):
nproc = len(filter(lambda x:x.startswith("processor"), open("/proc/cpuinfo")))
else:
try:
nproc = int(commands.getoutput("sysctl -n hw.ncpu"))
except:
pass
return nproc
# get_number_of_processors()
def safe_float(v):
try:
return float(v)
except ValueError:
return float("nan")
# safe_float()
def num_th_str(v):
s = str(v)
if s[-1] == "1": return s+"st"
if s[-1] == "2": return s+"nd"
if s[-1] == "3": return s+"rd"
return s+"th"
# num_th_str()
def directory_included(path, topdir=None, include_dir=[], exclude_dir=[]):
if topdir is None:
for d in include_dir:
if directory_included(path, d): return True
return False
l1 = filter(lambda x: x, path.split(os.sep))
l2 = filter(lambda x: x, topdir.split(os.sep))
lc = os.path.commonprefix([l1,l2])
if len(lc) != len(l2): return False
if include_dir == exclude_dir == []:
return True
if include_dir != []:
for d in include_dir:
if directory_included(path, d): return True
return False
if exclude_dir != []:
for d in exclude_dir:
if directory_included(path, d): return False
return True
# directory_included()
def read_path_list(lstin, comment_strs=["#"], only_exists=False, as_abspath=False, err_out=null_out()):
ret = []
for l in open(lstin):
for c in comment_strs:
if c in l: l = l[:l.index(c)]
l = l.strip()
if not l: continue
if only_exists and not os.path.exists(l):
err_out.write("Error: file not found: %s\n"%l)
continue
ret.append(os.path.abspath(l) if as_abspath else l)
return ret
# read_path_list()
def return_first_found_file(files, wd=None):
for f in files:
if wd is not None: f = os.path.join(wd, f)
if os.path.isfile(f): return f
# return_first_found_file()
def expand_wildcard_in_list(fdlst, err_out=null_out()):
ret = []
for d in fdlst:
gd = glob.glob(d)
if len(gd) == 0:
print >>err_out, "Error: No match!!: %s" % d
continue
ret.extend(gd)
return ret
# expand_wildcard_in_list()
def check_disk_free_bytes(d):
try:
x = os.statvfs(d)
return x.f_frsize * x.f_bavail
except:
return -1
# check_disk_free_bytes()
def get_temp_local_dir(prefix, min_bytes=None, min_kb=None, min_mb=None, min_gb=None, additional_tmpd=None):
assert (min_bytes, min_kb, min_mb, min_gb).count(None) >= 2
min_free_bytes = 0
if min_bytes is not None: min_free_bytes = min_bytes
if min_kb is not None: min_free_bytes = min_kb * 1024
if min_mb is not None: min_free_bytes = min_mb * 1024**2
if min_gb is not None: min_free_bytes = min_gb * 1024**3
ramdisk = "/dev/shm"
if os.path.isdir(ramdisk): tmpdirs = [ramdisk, tempfile.gettempdir()]
else: tmpdirs = [tempfile.gettempdir()]
if type(additional_tmpd) is str:
tmpdirs.append(additional_tmpd)
elif type(additional_tmpd) in (list, tuple):
tmpdirs.extend(additional_tmpd)
for tmpdir in tmpdirs:
if check_disk_free_bytes(tmpdir) >= min_free_bytes:
return tempfile.mkdtemp(prefix=prefix, dir=tmpdir)
return None
# get_temp_local_dir()
def get_temp_filename(prefix="tmp", suffix="", wdir=None):
tmpfd, tmp = tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=wdir)
os.close(tmpfd)
return tmp
# get_temp_filename()
def replace_forbidden_chars(filename, repl="-"):
return re.sub(r"[/><\*\\\?%:]", repl, filename)
# replace_forbidden_chars()
def human_readable_bytes(bytes):
if bytes < 1024:
return bytes, "B"
elif bytes < 1024**2:
return bytes/1024., "KB"
elif bytes < 1024**3:
return bytes/1024.**2, "MB"
elif bytes < 1024**4:
return bytes/1024.**3, "GB"
elif bytes < 1024**5:
return bytes/1024.**4, "TB"
else:# if bytes < 1024**6:
return bytes/1024.**5, "PB"
# human_readable_bytes()
def yamtbx_module_root():
"""
Possible locations: modules/yamtbx or modules/yamtbx/yamtbx
"""
tmp = libtbx.env.find_in_repositories("yamtbx/yamtbx")
if tmp: return tmp
tmp = libtbx.env.find_in_repositories("yamtbx")
if tmp: return tmp
# yamtbx_module_root()
| [
"[email protected]"
] | |
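rotate_file shifts existing numbered backups up by one (app.log.1 becomes app.log.2, and so on) before renaming, or with copy=True copying, the live file to app.log.1. A usage sketch with hypothetical file names; the import path follows from the module's location at yamtbx/util/__init__.py:

```python
from yamtbx.util import rotate_file

with open("app.log", "w") as f: f.write("first run\n")
rotate_file("app.log")             # app.log -> app.log.1
with open("app.log", "w") as f: f.write("second run\n")
rotate_file("app.log", copy=True)  # app.log.1 -> app.log.2; app.log copied to app.log.1
```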
a61e686a2a19b194f74d057cf3102cd5df782b64 | ff8bd1967aeb66ffec17c3ae78102c168414761a | /PythonIntroduction/datatypes/Strings/Stringemployee.py | b4b8a6638010e9657f3ff95b34f617e7bc0a2ee0 | [] | no_license | charan2108/Pythondirectories | d5cbec41db0685bbfc41a3135edc3e41fd223474 | 30b4a9f9171fe2295efbf12cbf9cbcf88bdf29b3 | refs/heads/master | 2023-03-28T03:29:25.039491 | 2021-03-29T02:45:19 | 2021-03-29T02:45:19 | 352,484,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | car_name = "Ferrari"
car_model = "F360"
car_manufactured = 2020 | [
"[email protected]"
] | |
a9eb757a2b0a176611cde9701778712b3dd565df | bec8abb5c3146377f1b3bc2f2b4eaa4d02502211 | /mediascraper/the_procrastinators/youtube_scraper.py | 7383cc710c70c57551b36229ef8259fb99726bbb | [
"MIT"
] | permissive | Kokitis/MediaScraper | 578f3d96f1ef731906e03e56db77e141823f8681 | 8bd7294942945d90838357f14e10558a0512e316 | refs/heads/master | 2020-03-26T11:51:34.460233 | 2018-08-16T00:45:37 | 2018-08-16T00:45:37 | 144,863,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | from pathlib import Path
import requests
import json
import yaml
from pprint import pprint
from typing import List, Tuple
import re
import datetime
from dataclasses import dataclass
from pytools.timetools import Duration
shownotes_regex = ""
@dataclass
class Shownote:
timestamp: Duration
title: str
link: str
def extract_description(text:str)->str:
description, *junk = text.split('PCP Episode')
description = description.strip()
return description
def extract_shownotes(lines:List[str])->List[Shownote]:
""" Extracts the timestamps, titles, and links of each shownote."""
regex = re.compile(r"[\d]+:[\d]+(?:[:][\d]+)?")  # mm:ss or hh:mm:ss at the start of a line
shownotes = list()
for current_line, next_line in zip(lines[:-1], lines[1:]):
if regex.match(current_line):
_time, *_title = current_line.split(' ')
timestamp = Duration.from_string(_time)
title = " ".join(_title)
link = next_line
shownote = Shownote(timestamp, title, link)
shownotes.append(shownote)
return shownotes
if __name__ == "__main__":
sample = Path(__file__).parent / "Tourist Trap Stockholm Syndrome - The Pro Crastinators Podcast, Episode 119-toHfm6RyLYo.info.json"
data = json.loads(sample.read_text())
description = data['description']
#print(description)
pprint(extract_shownotes(description.split('\n')))
| [
"[email protected]"
] | |
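extract_shownotes pairs each timestamp line with the line that follows it (the link). The same pairing logic on an inline sample, self-contained and using datetime.timedelta in place of the external pytools Duration (whose API, beyond the from_string call above, is not shown here):

```python
import re
from datetime import timedelta

description = """0:00 Intro
https://example.com/intro
12:34 Main topic
https://example.com/topic"""

pattern = re.compile(r"\d+:\d+(?::\d+)?")
lines = description.split("\n")
for cur, nxt in zip(lines[:-1], lines[1:]):
    if pattern.match(cur):
        stamp, *title = cur.split(" ")
        parts = [int(p) for p in stamp.split(":")]
        seconds = sum(p * 60 ** i for i, p in enumerate(reversed(parts)))
        print(timedelta(seconds=seconds), " ".join(title), nxt)
# 0:00:00 Intro https://example.com/intro
# 0:12:34 Main topic https://example.com/topic
```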
892c6ea0089b84d37e35d19760bbe949a8fd271a | 9c6522db2db8c4c075e23f2953776384973891a8 | /cn.ao/py.ori.fmt/c0310.bin.py | edfcd15820f2b96d91aea65d3e7db6e48edd12f7 | [] | no_license | ZhenjianYang/ZeroAoVoiceScripts | b388c28b498049d7d4277b8344cdc098488fd258 | 7e0b696d743912739b855acb4306f1dcf564d6c0 | refs/heads/master | 2021-05-18T08:58:59.482674 | 2021-02-19T08:22:19 | 2021-02-19T08:22:19 | 94,624,272 | 12 | 5 | null | 2021-02-19T08:22:20 | 2017-06-17T13:06:15 | Python | UTF-8 | Python | false | false | 70,427 | py | from ScenarioHelper import *
def main():
CreateScenaFile(
"c0310.bin", # FileName
"c0310", # MapName
"c0310", # Location
0x002B, # MapIndex
"ed7150",
0x00002000, # Flags
("", "", "", "", "", ""), # include
0x00, # PlaceNameNumber
0x00, # PreInitFunctionIndex
b'\x00\xff\xff', # Unknown_51
# Information
[0, 0, -1000, 0, 0, 0, 24000, 500, 30, 45, 0, 360, 0, 0, 0, 0, 0, 1, 43, 0, 4, 0, 5],
)
BuildStringList((
"c0310", # 0
"海尔玛", # 1
"乔安娜", # 2
))
AddCharChip((
"chr/ch25800.itc", # 00
"chr/ch25700.itc", # 01
))
DeclNpc(0, 4059, 7760, 180, 257, 0x0, 0, 0, 0, 0, 2, 0, 6, 255, 0)
DeclNpc(-45349, 59, 3900, 360, 257, 0x0, 0, 1, 0, 0, 0, 0, 8, 255, 0)
DeclActor(-40820, 0, 40910, 1500, -40820, 1500, 40910, 0x007C, 0, 9, 0x0000)
ChipFrameInfo(296, 0) # 0
ScpFunction((
"Function_0_128", # 00, 0
"Function_1_1E0", # 01, 1
"Function_2_20B", # 02, 2
"Function_3_236", # 03, 3
"Function_4_261", # 04, 4
"Function_5_392", # 05, 5
"Function_6_43B", # 06, 6
"Function_7_14A9", # 07, 7
"Function_8_16A7", # 08, 8
"Function_9_2859", # 09, 9
"Function_10_33A5", # 0A, 10
"Function_11_3990", # 0B, 11
))
def Function_0_128(): pass
label("Function_0_128")
RunExpression(0x2, (scpexpr(EXPR_RAND), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_IMOD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Switch(
(scpexpr(EXPR_GET_RESULT, 0x2), scpexpr(EXPR_END)),
(0, "loc_168"),
(1, "loc_174"),
(2, "loc_180"),
(3, "loc_18C"),
(4, "loc_198"),
(5, "loc_1A4"),
(6, "loc_1B0"),
(SWITCH_DEFAULT, "loc_1BC"),
)
label("loc_168")
OP_A0(0xFE, 1450, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_174")
OP_A0(0xFE, 1550, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_180")
OP_A0(0xFE, 1600, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_18C")
OP_A0(0xFE, 1400, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_198")
OP_A0(0xFE, 1650, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_1A4")
OP_A0(0xFE, 1350, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_1B0")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_1BC")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_1C8")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_1DF")
OP_A0(0xFE, 1500, 0x0, 0xFB)
Jump("loc_1C8")
label("loc_1DF")
Return()
# Function_0_128 end
def Function_1_1E0(): pass
label("Function_1_1E0")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_20A")
OP_94(0xFE, 0xFFFFF63C, 0x0, 0x9C4, 0x73A, 0x3E8)
Sleep(300)
Jump("Function_1_1E0")
label("loc_20A")
Return()
# Function_1_1E0 end
def Function_2_20B(): pass
label("Function_2_20B")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_235")
OP_94(0xFE, 0xFFFFF8B2, 0x1A36, 0x744, 0x26DE, 0x3E8)
Sleep(300)
Jump("Function_2_20B")
label("loc_235")
Return()
# Function_2_20B end
def Function_3_236(): pass
label("Function_3_236")
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_260")
OP_94(0xFE, 0xA00A, 0xA05A, 0xB31A, 0xB220, 0x3E8)
Sleep(300)
Jump("Function_3_236")
label("loc_260")
Return()
# Function_3_236 end
def Function_4_261(): pass
label("Function_4_261")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_26F")
Jump("loc_391")
label("loc_26F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_2A5")
SetChrPos(0x8, 810, 0, 500, 270)
BeginChrThread(0x8, 0, 0, 0)
SetChrPos(0x9, -810, 0, 500, 90)
Jump("loc_391")
label("loc_2A5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_2B3")
Jump("loc_391")
label("loc_2B3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_2D2")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x10)"), scpexpr(EXPR_END)), "loc_2CD")
SetChrFlags(0x9, 0x80)
label("loc_2CD")
Jump("loc_391")
label("loc_2D2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_2E0")
Jump("loc_391")
label("loc_2E0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_2EE")
Jump("loc_391")
label("loc_2EE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_301")
SetChrFlags(0x9, 0x80)
Jump("loc_391")
label("loc_301")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_30F")
Jump("loc_391")
label("loc_30F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_31D")
Jump("loc_391")
label("loc_31D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_32B")
Jump("loc_391")
label("loc_32B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 0)), scpexpr(EXPR_END)), "loc_339")
Jump("loc_391")
label("loc_339")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_347")
Jump("loc_391")
label("loc_347")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_355")
Jump("loc_391")
label("loc_355")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_379")
SetChrPos(0x9, -42190, 0, 48970, 0)
SetChrFlags(0x9, 0x10)
Jump("loc_391")
label("loc_379")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_391")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x134, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_391")
SetChrFlags(0x8, 0x10)
label("loc_391")
Return()
# Function_4_261 end
def Function_5_392(): pass
label("Function_5_392")
OP_65(0x0, 0x1)
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x87, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x87, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x87, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x177, 3)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_3BA")
OP_66(0x0, 0x1)
label("loc_3BA")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_PUSH_VALUE_INDEX, 0x4), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_3FF")
SetMapObjFrame(0xFF, "light01", 0x0, 0x1)
SetMapObjFrame(0xFF, "model05_light", 0x0, 0x1)
Sound(128, 1, 50, 0)
label("loc_3FF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 2)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_43A")
OP_7D(0xD2, 0xD2, 0xE6, 0x0, 0x0)
SetMapObjFrame(0xFF, "light01", 0x0, 0x1)
SetMapObjFrame(0xFF, "model05_light", 0x0, 0x1)
label("loc_43A")
Return()
# Function_5_392 end
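# Function_5 above performs per-visit map setup: OP_65/OP_66 seem to toggle
# a map event or trigger, and when the relevant scenario flags are set it
# enables the "light01"/"model05_light" object frames and plays sound 128
# (looped, judging by the flag argument).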
def Function_6_43B(): pass
label("Function_6_43B")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_57F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4FF")
#C0001
ChrTalk(
0xFE,
(
"老爷好像正在兰花塔内\x01",
"指挥今后的行动。\x02",
)
)
CloseMessageWindow()
#C0002
ChrTalk(
0xFE,
(
"总统已经被拘捕,\x01",
"如今能引领克洛斯贝尔的\x01",
"只有老爷一个人了。\x02",
)
)
CloseMessageWindow()
#C0003
ChrTalk(
0xFE,
(
"虽然这肯定是很重的负担……\x01",
"但还是希望老爷能好好加油。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_57A")
label("loc_4FF")
#C0004
ChrTalk(
0xFE,
(
"总统已经被拘捕,\x01",
"如今能引领克洛斯贝尔的\x01",
"只有老爷一个人了。\x02",
)
)
CloseMessageWindow()
#C0005
ChrTalk(
0xFE,
(
"虽然这肯定是很重的负担……\x01",
"但还是希望老爷能好好加油。\x02",
)
)
CloseMessageWindow()
label("loc_57A")
Jump("loc_14A5")
label("loc_57F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_610")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1CC, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_59A")
Call(0, 7)
Jump("loc_60B")
label("loc_59A")
#C0006
ChrTalk(
0xFE,
(
"自从老爷和大小姐\x01",
"被软禁在米修拉姆之后,\x01",
"我就一直担心得坐立不安……\x02",
)
)
CloseMessageWindow()
#C0007
ChrTalk(
0xFE,
(
"能再次见到您,\x01",
"我也总算可以安心了。\x02",
)
)
CloseMessageWindow()
label("loc_60B")
Jump("loc_14A5")
label("loc_610")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_788")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x18C, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_721")
TurnDirection(0xFE, 0x102, 0)
#C0008
ChrTalk(
0xFE,
(
"……大小姐……\x01",
"老爷有和您联络\x01",
"过吗?\x02",
)
)
CloseMessageWindow()
#C0009
ChrTalk(
0x102,
"#00103F不,我这边也完全没消息……\x02",
)
CloseMessageWindow()
#C0010
ChrTalk(
0xFE,
"是吗……\x02",
)
CloseMessageWindow()
#C0011
ChrTalk(
0xFE,
(
"唔,总之……\x01",
"如果我了解到什么情况,\x01",
"一定会和您联络的。\x02",
)
)
CloseMessageWindow()
#C0012
ChrTalk(
0xFE,
(
"大小姐与各位就\x01",
"专心处理\x01",
"自己的工作吧。\x02",
)
)
CloseMessageWindow()
#C0013
ChrTalk(
0x102,
(
"#00100F嗯,拜托你了,\x01",
"海尔玛先生。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x18C, 2)
Jump("loc_783")
label("loc_721")
#C0014
ChrTalk(
0xFE,
(
"如果我了解到\x01",
"有关老爷的情况,\x01",
"一定会和您联络的。\x02",
)
)
CloseMessageWindow()
#C0015
ChrTalk(
0xFE,
(
"大小姐与各位就\x01",
"专心处理\x01",
"自己的工作吧。\x02",
)
)
CloseMessageWindow()
label("loc_783")
Jump("loc_14A5")
label("loc_788")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_8D2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_864")
#C0016
ChrTalk(
0xFE,
(
"……之前的那起袭击事件\x01",
"真是一场惨痛的经历。\x02",
)
)
CloseMessageWindow()
#C0017
ChrTalk(
0xFE,
(
"虽然重建工作\x01",
"总算是取得了一些进展……\x02",
)
)
CloseMessageWindow()
#C0018
ChrTalk(
0xFE,
(
"但现在还是有不少人\x01",
"无法从恐惧中解脱。\x02",
)
)
CloseMessageWindow()
#C0019
ChrTalk(
0xFE,
(
"为了防止那种事件再次发生,\x01",
"希望老爷和市长\x01",
"都要加油。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_8CD")
label("loc_864")
#C0020
ChrTalk(
0xFE,
(
"不久前的那起袭击事件\x01",
"真是一场惨痛的经历。\x02",
)
)
CloseMessageWindow()
#C0021
ChrTalk(
0xFE,
(
"为了防止那种事件再次发生,\x01",
"希望老爷和市长\x01",
"都要加油。\x02",
)
)
CloseMessageWindow()
label("loc_8CD")
Jump("loc_14A5")
label("loc_8D2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_A0B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_9A4")
#C0022
ChrTalk(
0xFE,
(
"老爷正在和市长\x01",
"一起研究玛因兹地区\x01",
"遭到占领事件的对策。\x02",
)
)
CloseMessageWindow()
#C0023
ChrTalk(
0xFE,
(
"据说,在武装集团面前,\x01",
"连警备队都束手无策,\x01",
"而且这样的状况还在持续……\x02",
)
)
CloseMessageWindow()
#C0024
ChrTalk(
0xFE,
(
"……真让人担心啊。\x01",
"但愿能尽早将事件解决。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_A06")
label("loc_9A4")
#C0025
ChrTalk(
0xFE,
(
"据说武装集团十分强悍,\x01",
"连警备队都束手无策……\x02",
)
)
CloseMessageWindow()
#C0026
ChrTalk(
0xFE,
(
"……真让人担心啊。\x01",
"但愿能尽早将事件解决。\x02",
)
)
CloseMessageWindow()
label("loc_A06")
Jump("loc_14A5")
label("loc_A0B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_B23")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_ABE")
#C0027
ChrTalk(
0xFE,
(
"昨天那起脱轨事故……\x01",
"呼,真是让人震惊。\x02",
)
)
CloseMessageWindow()
#C0028
ChrTalk(
0xFE,
(
"大家现在都议论纷纷,\x01",
"说事故发生的原因是落石或\x01",
"巨大怪物的袭击……\x02",
)
)
CloseMessageWindow()
#C0029
ChrTalk(
0xFE,
(
"唔……真正的原因\x01",
"究竟是什么呢?\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_B1E")
label("loc_ABE")
#C0030
ChrTalk(
0xFE,
"昨天那起脱轨事故真是让人震惊。\x02",
)
CloseMessageWindow()
#C0031
ChrTalk(
0xFE,
(
"市内流传着各种各样的传言……\x01",
"真正的原因\x01",
"究竟是什么呢?\x02",
)
)
CloseMessageWindow()
label("loc_B1E")
Jump("loc_14A5")
label("loc_B23")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_B6E")
#C0032
ChrTalk(
0xFE,
(
"好像有警笛声\x01",
"从西街那边传来……\x02",
)
)
CloseMessageWindow()
#C0033
ChrTalk(
0xFE,
"大概是我听错了吧。\x02",
)
CloseMessageWindow()
Jump("loc_14A5")
label("loc_B6E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_C0E")
#C0034
ChrTalk(
0xFE,
(
"独立的提案给社会各界\x01",
"都造成了一定影响,\x01",
"老爷正在努力制订应对措施。\x02",
)
)
CloseMessageWindow()
#C0035
ChrTalk(
0xFE,
(
"他今天也要和迪塔市长\x01",
"一起在兰花塔开会……\x01",
"希望他能注意自己的身体啊。\x02",
)
)
CloseMessageWindow()
Jump("loc_14A5")
label("loc_C0E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_D5A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_CDE")
#C0036
ChrTalk(
0xFE,
(
"调查独立意向的居民投票活动\x01",
"已经渐渐临近了。\x02",
)
)
CloseMessageWindow()
#C0037
ChrTalk(
0xFE,
(
"关于这个问题,\x01",
"老爷认为应该采取\x01",
"慎重的态度来对待……\x02",
)
)
CloseMessageWindow()
#C0038
ChrTalk(
0xFE,
(
"而克洛斯贝尔的居民们\x01",
"最终又会做出怎样的选择呢……\x01",
"我对此也很有兴趣。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_D55")
label("loc_CDE")
#C0039
ChrTalk(
0xFE,
(
"调查独立意向的居民投票活动\x01",
"已经渐渐临近了。\x02",
)
)
CloseMessageWindow()
#C0040
ChrTalk(
0xFE,
(
"克洛斯贝尔的居民们\x01",
"最终会做出怎样的选择呢……\x01",
"我对此也很有兴趣。\x02",
)
)
CloseMessageWindow()
label("loc_D55")
Jump("loc_14A5")
label("loc_D5A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_DB7")
#C0041
ChrTalk(
0xFE,
(
"老爷今天也\x01",
"一大早就前往\x01",
"兰花塔了。\x02",
)
)
CloseMessageWindow()
#C0042
ChrTalk(
0xFE,
(
"希望今天的正式会议\x01",
"能顺利结束……\x02",
)
)
CloseMessageWindow()
Jump("loc_14A5")
label("loc_DB7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 0)), scpexpr(EXPR_END)), "loc_ED4")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_E6E")
#C0043
ChrTalk(
0xFE,
(
"老爷最近很忙,\x01",
"经常连家都不回,\x01",
"在市政厅过夜休息……\x02",
)
)
CloseMessageWindow()
#C0044
ChrTalk(
0xFE,
(
"刚才接到了联络,\x01",
"老爷今天总算\x01",
"要回家了。\x02",
)
)
CloseMessageWindow()
#C0045
ChrTalk(
0xFE,
(
"希望老爷养精蓄锐,\x01",
"为明天的正式会议\x01",
"做好准备。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_ECF")
label("loc_E6E")
#C0046
ChrTalk(
0xFE,
(
"今晚为老爷\x01",
"准备了营养价值\x01",
"很高的料理。\x02",
)
)
CloseMessageWindow()
#C0047
ChrTalk(
0xFE,
(
"希望老爷能养足精神,\x01",
"为明天的正式会议做好准备。\x02",
)
)
CloseMessageWindow()
label("loc_ECF")
Jump("loc_14A5")
label("loc_ED4")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_102A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_FA8")
#C0048
ChrTalk(
0xFE,
(
"在哈尔特曼担任议长的时期,\x01",
"老爷光是为了制衡帝国派与共和国派\x01",
"的议员,就耗尽了心神。\x02",
)
)
CloseMessageWindow()
#C0049
ChrTalk(
0xFE,
(
"但最近和新市长联手协力,\x01",
"总算能够与他们形成\x01",
"势均力敌之势……\x02",
)
)
CloseMessageWindow()
#C0050
ChrTalk(
0xFE,
(
"为此,我也感到\x01",
"十分高兴。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_1025")
label("loc_FA8")
#C0051
ChrTalk(
0xFE,
(
"迪塔先生能成为市长,\x01",
"也让我感到十分欣喜。\x02",
)
)
CloseMessageWindow()
#C0052
ChrTalk(
0xFE,
(
"在从明天开始的正式会议中……\x01",
"希望老爷和市长都能\x01",
"充分展现自己的政治手腕。\x02",
)
)
CloseMessageWindow()
label("loc_1025")
Jump("loc_14A5")
label("loc_102A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_10AE")
#C0053
ChrTalk(
0xFE,
(
"为了明日开始的通商会议,\x01",
"老爷和迪塔市长\x01",
"都在精心进行准备工作。\x02",
)
)
CloseMessageWindow()
#C0054
ChrTalk(
0xFE,
(
"老爷最近几乎\x01",
"都不回家了……\x01",
"真是让人担心啊。\x02",
)
)
CloseMessageWindow()
Jump("loc_14A5")
label("loc_10AE")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_1242")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_11AF")
#C0055
ChrTalk(
0xFE,
(
"不久前,有几个人搬到\x01",
"隔壁的房子居住了……\x01",
"他们很快就造成了很多问题。\x02",
)
)
CloseMessageWindow()
#C0056
ChrTalk(
0xFE,
(
"比如开着导力车在市内横冲直撞,\x01",
"半夜播放音量超大的音乐……\x01",
"这种行为实在是让人看不下去。\x02",
)
)
CloseMessageWindow()
#C0057
ChrTalk(
0xFE,
(
"但就算提出抗议,\x01",
"他们也充耳不闻……\x01",
"到底该怎么办才好呢……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 0)
Jump("loc_123D")
label("loc_11AF")
#C0058
ChrTalk(
0xFE,
(
"不久前,有几个人搬到\x01",
"隔壁的房子居住了……\x01",
"他们的行为实在是让人难以容忍。\x02",
)
)
CloseMessageWindow()
#C0059
ChrTalk(
0xFE,
(
"但就算提出抗议,\x01",
"他们也充耳不闻……\x01",
"到底该怎么办才好呢……\x02",
)
)
CloseMessageWindow()
label("loc_123D")
Jump("loc_14A5")
label("loc_1242")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_14A5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x134, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1440")
TurnDirection(0xFE, 0x102, 0)
#C0060
ChrTalk(
0xFE,
(
"这不是艾莉大小姐吗!\x01",
"欢迎您回来。\x02",
)
)
CloseMessageWindow()
#C0061
ChrTalk(
0x102,
"#00100F我回来了,海尔玛先生。\x02",
)
CloseMessageWindow()
#C0062
ChrTalk(
0x105,
(
"#10300F原来如此,\x01",
"这里就是艾莉的家啊。\x02",
)
)
CloseMessageWindow()
#C0063
ChrTalk(
0x109,
"#10102F真是一座大房子呢。\x02",
)
CloseMessageWindow()
#C0064
ChrTalk(
0x101,
(
"#00004F嗯……\x01",
"毕竟是麦克道尔议长\x01",
"的宅邸嘛。\x02",
)
)
CloseMessageWindow()
#C0065
ChrTalk(
0x102,
"#00109F呵呵,请大家不要拘束。\x02",
)
CloseMessageWindow()
#C0066
ChrTalk(
0xFE,
(
"是啊,各位都是\x01",
"大小姐的同事,\x01",
"请放松些,不必拘谨。\x02",
)
)
CloseMessageWindow()
#C0067
ChrTalk(
0xFE,
(
"特别任务支援科\x01",
"总算恢复工作了,\x01",
"接下来大概会非常繁忙……\x02",
)
)
CloseMessageWindow()
#C0068
ChrTalk(
0xFE,
(
"今后也请各位\x01",
"继续关照艾莉大小姐。\x02",
)
)
CloseMessageWindow()
#C0069
ChrTalk(
0x101,
"#00000F嗯,放心吧。\x02",
)
CloseMessageWindow()
#C0070
ChrTalk(
0x102,
(
"#00102F呵呵,谢谢了,海尔玛先生。\x01",
"我会继续努力的。\x02",
)
)
CloseMessageWindow()
ClearChrFlags(0xFE, 0x10)
SetScenarioFlags(0x134, 1)
Jump("loc_14A5")
label("loc_1440")
#C0071
ChrTalk(
0xFE,
(
"特别任务支援科\x01",
"总算恢复工作了,\x01",
"接下来大概会非常繁忙……\x02",
)
)
CloseMessageWindow()
#C0072
ChrTalk(
0xFE,
(
"今后也请各位\x01",
"继续关照艾莉大小姐。\x02",
)
)
CloseMessageWindow()
label("loc_14A5")
TalkEnd(0xFE)
Return()
# Function_6_43B end
def Function_7_14A9(): pass
label("Function_7_14A9")
OP_4B(0x8, 0xFF)
OP_4B(0x9, 0xFF)
TurnDirection(0x8, 0x0, 0)
TurnDirection(0x9, 0x0, 0)
#C0073
ChrTalk(
0x8,
"哦哦,各位……!\x02",
)
CloseMessageWindow()
#C0074
ChrTalk(
0x9,
(
"大、大小姐…………\x01",
"……艾莉大小姐……!!\x02",
)
)
CloseMessageWindow()
#C0075
ChrTalk(
0x102,
(
"#00100F我回来啦,海尔玛先生,乔安娜。\x01",
"……让你们担心了呢。\x02",
)
)
CloseMessageWindow()
#C0076
ChrTalk(
0x9,
"…………(哽咽)\x02",
)
CloseMessageWindow()
#C0077
ChrTalk(
0x8,
(
"自从老爷发表独立无效宣言之后,\x01",
"我一直都非常担心,\x01",
"不知你们是否平安无事……\x02",
)
)
CloseMessageWindow()
#C0078
ChrTalk(
0x8,
(
"能再次见到您,\x01",
"我也总算可以安心了。\x02",
)
)
CloseMessageWindow()
#C0079
ChrTalk(
0x102,
(
"#00100F呵呵,谢谢。\x02\x03",
"#00103F……不过,我们现在无论如何\x01",
"也必须要去某个地方。\x02\x03",
"#00101F请二位暂时留在这里等我们,\x01",
"可以吗?\x02",
)
)
CloseMessageWindow()
#C0080
ChrTalk(
0x8,
"嗯,当然。\x02",
)
CloseMessageWindow()
#C0081
ChrTalk(
0x9,
(
"艾莉大小姐,各位……\x01",
"请你们一定要小心。\x02",
)
)
CloseMessageWindow()
OP_4C(0x8, 0xFF)
OP_4C(0x9, 0xFF)
OP_93(0x8, 0x10E, 0x0)
OP_93(0x9, 0x5A, 0x0)
SetScenarioFlags(0x1CC, 7)
Return()
# Function_7_14A9 end
def Function_8_16A7(): pass
label("Function_8_16A7")
TalkBegin(0xFE)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A7, 1)), scpexpr(EXPR_END)), "loc_17FD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_17B4")
#C0082
ChrTalk(
0xFE,
(
"艾莉大小姐,\x01",
"你们要前往那棵\x01",
"诡异的大树吧……\x02",
)
)
CloseMessageWindow()
#C0083
ChrTalk(
0xFE,
"…………………………\x02",
)
CloseMessageWindow()
#C0084
ChrTalk(
0x102,
(
"#00104F别担心,乔安娜,\x01",
"我们一定会平安归来的。\x02",
)
)
CloseMessageWindow()
#C0085
ChrTalk(
0xFE,
(
"……嗯。\x01",
"至今为止,大小姐每次\x01",
"都平安回到了这里……\x02",
)
)
CloseMessageWindow()
#C0086
ChrTalk(
0xFE,
(
"所以我相信您这次也不会有事的。\x01",
"……请一定要小心。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_17F8")
label("loc_17B4")
#C0087
ChrTalk(
0xFE,
(
"我相信艾莉大小姐和各位\x01",
"一定能平安归来。\x01",
"……请大家一定要小心。\x02",
)
)
CloseMessageWindow()
label("loc_17F8")
Jump("loc_2855")
label("loc_17FD")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1A5, 3)), scpexpr(EXPR_END)), "loc_1864")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1CC, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1818")
Call(0, 7)
Jump("loc_185F")
label("loc_1818")
#C0088
ChrTalk(
0xFE,
"外面好像非常危险……\x02",
)
CloseMessageWindow()
#C0089
ChrTalk(
0xFE,
(
"艾莉大小姐,各位……\x01",
"请你们一定要小心。\x02",
)
)
CloseMessageWindow()
label("loc_185F")
Jump("loc_2855")
label("loc_1864")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x182, 1)), scpexpr(EXPR_END)), "loc_1A20")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x18C, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_19C2")
#C0090
ChrTalk(
0xFE,
(
"老爷他……\x01",
"到底到什么地方\x01",
"去了呢……?\x02",
)
)
CloseMessageWindow()
#C0091
ChrTalk(
0xFE,
"我好担心……\x02",
)
CloseMessageWindow()
#C0092
ChrTalk(
0x101,
(
"#00003F在刚才的演说现场直播里\x01",
"也没有见到议长呢……\x02",
)
)
CloseMessageWindow()
#C0093
ChrTalk(
0x102,
"#00108F外公……到底在什么地方……\x02",
)
CloseMessageWindow()
TurnDirection(0xFE, 0x102, 500)
#C0094
ChrTalk(
0xFE,
(
"……啊啊,对不起!\x01",
"我竟然口无遮拦,\x01",
"害得大小姐也开始不安了……\x02",
)
)
CloseMessageWindow()
#C0095
ChrTalk(
0x102,
(
"#00103F……哪里,我不要紧。\x02\x03",
"#00100F乔安娜,你也不要\x01",
"太过担心哦。\x02",
)
)
CloseMessageWindow()
#C0096
ChrTalk(
0xFE,
"明、明白了……\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x18C, 3)
Jump("loc_1A1B")
label("loc_19C2")
#C0097
ChrTalk(
0xFE,
(
"……我竟然口无遮拦,\x01",
"害得大小姐\x01",
"也开始不安了……\x02",
)
)
CloseMessageWindow()
#C0098
ChrTalk(
0xFE,
(
"老爷……\x01",
"一定会平安无事的……\x02",
)
)
CloseMessageWindow()
label("loc_1A1B")
Jump("loc_2855")
label("loc_1A20")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x180, 2)), scpexpr(EXPR_END)), "loc_1B27")
Jc((scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x2)"), scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x10)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_EXEC_OP, "OP_2A(0x8F, 0x0, 0x40)"), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x198, 4)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x199, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_1A52")
Call(0, 10)
Return()
label("loc_1A52")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x199, 6)), scpexpr(EXPR_END)), "loc_1AC9")
#C0099
ChrTalk(
0xFE,
(
"……我还是…………\x01",
"决定参加\x01",
"职业女性选秀活动。\x02",
)
)
CloseMessageWindow()
#C0100
ChrTalk(
0xFE,
(
"……活动开始前请通知我吧,\x01",
"我会立刻赶过去的……\x02",
)
)
CloseMessageWindow()
Jump("loc_1B22")
label("loc_1AC9")
#C0101
ChrTalk(
0xFE,
(
"听说今天要在\x01",
"行政区举办一场\x01",
"慈善宴会……\x02",
)
)
CloseMessageWindow()
#C0102
ChrTalk(
0xFE,
(
"……有没有什么\x01",
"我能帮上忙的事情呢……\x02",
)
)
CloseMessageWindow()
label("loc_1B22")
Jump("loc_2855")
label("loc_1B27")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x165, 5)), scpexpr(EXPR_END)), "loc_1B89")
#C0103
ChrTalk(
0xFE,
"矿山镇竟然被占领了……\x02",
)
CloseMessageWindow()
#C0104
ChrTalk(
0xFE,
(
"……我好害怕……\x01",
"总觉得接下来\x01",
"还会发生什么事情……\x02",
)
)
CloseMessageWindow()
Jump("loc_2855")
label("loc_1B89")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x164, 0)), scpexpr(EXPR_END)), "loc_1BD4")
#C0105
ChrTalk(
0xFE,
"最近经常下雨啊……\x02",
)
CloseMessageWindow()
#C0106
ChrTalk(
0xFE,
(
"……洗好的衣物都晒不干。\x01",
"呼……\x02",
)
)
CloseMessageWindow()
Jump("loc_2855")
label("loc_1BD4")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 5)), scpexpr(EXPR_END)), "loc_1BE2")
Jump("loc_2855")
label("loc_1BE2")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x162, 0)), scpexpr(EXPR_END)), "loc_1D6E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1D10")
#C0107
ChrTalk(
0xFE,
(
"老爷最近特别繁忙,\x01",
"得用心为他准备些有营养的食物……\x02",
)
)
CloseMessageWindow()
#C0108
ChrTalk(
0xFE,
"……做什么料理才好呢?\x02",
)
CloseMessageWindow()
#C0109
ChrTalk(
0x102,
(
"#00100F这个嘛……\x01",
"羔羊肉如何呢?\x02\x03",
"#00104F高蛋白,低热量,\x01",
"而且应该很合外公的口味。\x02",
)
)
CloseMessageWindow()
TurnDirection(0xFE, 0x102, 500)
#C0110
ChrTalk(
0xFE,
(
"……似乎不错呢……\x01",
"真不愧是艾莉大小姐。\x02",
)
)
CloseMessageWindow()
#C0111
ChrTalk(
0x102,
(
"#00109F啊、啊哈哈,\x01",
"这点小事不值得夸奖啦……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_1D69")
label("loc_1D10")
#C0112
ChrTalk(
0xFE,
(
"我的饭量很小,\x01",
"一般不吃肉,\x01",
"不过做给老爷应该不错呢。\x02",
)
)
CloseMessageWindow()
#C0113
ChrTalk(
0xFE,
(
"一会得去百货店\x01",
"买食材……\x02",
)
)
CloseMessageWindow()
label("loc_1D69")
Jump("loc_2855")
label("loc_1D6E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x160, 0)), scpexpr(EXPR_END)), "loc_1EC0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1E94")
#C0114
ChrTalk(
0xFE,
(
"听说在前不久召开的通商会议中,\x01",
"有恐怖分子发动了袭击……\x01",
"听到这消息时,我的心脏都快停止跳动了。\x02",
)
)
CloseMessageWindow()
#C0115
ChrTalk(
0xFE,
(
"自那之后,我一直非常担心\x01",
"艾莉大小姐和老爷……\x02",
)
)
CloseMessageWindow()
#C0116
ChrTalk(
0x102,
(
"#00103F乔安娜……\x01",
"你不用那么担心的。\x02\x03",
"#00100F有支援科的同伴陪着我……\x01",
"一定不会出什么事的。\x02",
)
)
CloseMessageWindow()
#C0117
ChrTalk(
0xFE,
"嗯……是啊……\x02",
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_1EBB")
label("loc_1E94")
#C0118
ChrTalk(
0xFE,
(
"各位……\x01",
"艾莉大小姐就拜托你们了。\x02",
)
)
CloseMessageWindow()
label("loc_1EBB")
Jump("loc_2855")
label("loc_1EC0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 5)), scpexpr(EXPR_END)), "loc_1F17")
#C0119
ChrTalk(
0xFE,
(
"艾莉大小姐……\x01",
"市里今天好像也维持着戒严状态。\x02",
)
)
CloseMessageWindow()
#C0120
ChrTalk(
0xFE,
"请您一定小心……\x02",
)
CloseMessageWindow()
Jump("loc_2855")
label("loc_1F17")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x141, 0)), scpexpr(EXPR_END)), "loc_208D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2002")
#C0121
ChrTalk(
0xFE,
(
"今天的晚餐\x01",
"是老爷最喜欢的\x01",
"苦西红柿料理。\x02",
)
)
CloseMessageWindow()
#C0122
ChrTalk(
0xFE,
(
"苦西红柿沙拉、\x01",
"苦西红柿酱的薏面,\x01",
"还有100%浓度的苦西红柿汁……\x02",
)
)
CloseMessageWindow()
#C0123
ChrTalk(
0x101,
(
"#00005F(哇……\x01",
" 好极端的菜单啊。)\x02",
)
)
CloseMessageWindow()
#C0124
ChrTalk(
0x102,
(
"#00106F(外公竟然那么喜欢\x01",
" 苦西红柿……)\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_2088")
label("loc_2002")
#C0125
ChrTalk(
0xFE,
(
"我今天做了老爷最喜欢的\x01",
"苦西红柿料理。\x02",
)
)
CloseMessageWindow()
#C0126
ChrTalk(
0xFE,
(
"听说吃苦西红柿有利于健康,\x01",
"我也准备忍耐着\x01",
"试试……\x02",
)
)
CloseMessageWindow()
#C0127
ChrTalk(
0x102,
"#00105F可、可不要太勉强哦……\x02",
)
CloseMessageWindow()
label("loc_2088")
Jump("loc_2855")
label("loc_208D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 4)), scpexpr(EXPR_END)), "loc_21D3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2151")
#C0128
ChrTalk(
0xFE,
(
"今天早上,我正好在门口\x01",
"遇到了住在隔壁的那几个人……\x02",
)
)
CloseMessageWindow()
#C0129
ChrTalk(
0xFE,
(
"他们突然就对我发出邀请,\x01",
"说『一起去兜风吧』。\x02",
)
)
CloseMessageWindow()
#C0130
ChrTalk(
0xFE,
(
"我自然是礼貌地回绝了,\x01",
"总觉得那些人实在是欠缺教养啊……\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
Jump("loc_21CE")
label("loc_2151")
#C0131
ChrTalk(
0xFE,
(
"今天早上,住在隔壁的\x01",
"那几个人突然邀请我\x01",
"和他们一起去兜风。\x02",
)
)
CloseMessageWindow()
#C0132
ChrTalk(
0xFE,
(
"我自然是礼貌地回绝了,\x01",
"总觉得那些人实在是欠缺教养啊……\x02",
)
)
CloseMessageWindow()
label("loc_21CE")
Jump("loc_2855")
label("loc_21D3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x140, 0)), scpexpr(EXPR_END)), "loc_224E")
#C0133
ChrTalk(
0xFE,
(
"新市政厅大楼\x01",
"明天就要正式揭幕了……\x02",
)
)
CloseMessageWindow()
#C0134
ChrTalk(
0xFE,
(
"就算隔着帷幕,\x01",
"都能充分感受到它的魄力,\x01",
"简直让人头昏目眩呢……\x02",
)
)
CloseMessageWindow()
Jump("loc_2855")
label("loc_224E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x128, 1)), scpexpr(EXPR_END)), "loc_238B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2337")
#C0135
ChrTalk(
0xFE,
"(吱吱……吱吱……)\x02",
)
CloseMessageWindow()
#C0136
ChrTalk(
0x102,
(
"#00105F哎,乔安娜,\x01",
"你在窗户上画东西吗?\x02",
)
)
CloseMessageWindow()
OP_63(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
TurnDirection(0x9, 0x102, 1000)
Sleep(1000)
#C0137
ChrTalk(
0xFE,
(
"……窗、窗户上蒙了一层白雾,\x01",
"所以我不由自主就……\x02",
)
)
CloseMessageWindow()
#C0138
ChrTalk(
0xFE,
(
"失、失礼了,\x01",
"我这就继续做扫除。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x0, 1)
ClearChrFlags(0x9, 0x10)
Jump("loc_2386")
label("loc_2337")
#C0139
ChrTalk(
0xFE,
(
"不知为何,只要听到雨声,\x01",
"我就会分心走神……\x02",
)
)
CloseMessageWindow()
#C0140
ChrTalk(
0xFE,
"唉,真是不喜欢下雨天啊。\x02",
)
CloseMessageWindow()
label("loc_2386")
Jump("loc_2855")
label("loc_238B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x126, 1)), scpexpr(EXPR_END)), "loc_2855")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x134, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_27DF")
OP_63(0xFE, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
TurnDirection(0x9, 0x102, 0)
Sleep(1000)
#C0141
ChrTalk(
0xFE,
"啊……\x02",
)
CloseMessageWindow()
#C0142
ChrTalk(
0xFE,
"欢迎回来,艾莉大小姐。\x02",
)
CloseMessageWindow()
#C0143
ChrTalk(
0x102,
(
"#00100F我回来了,乔安娜,\x01",
"今天没有什么异常情况吧?\x02",
)
)
CloseMessageWindow()
#C0144
ChrTalk(
0xFE,
"是的……托您的福。\x02",
)
CloseMessageWindow()
#C0145
ChrTalk(
0xFE,
(
"大小姐外出旅行归来,\x01",
"我也总算可以安心了。\x02",
)
)
CloseMessageWindow()
#C0146
ChrTalk(
0x102,
(
"#00102F呵呵,乔安娜,你可真是的……\x01",
"根本不用那么担心啊。\x02",
)
)
CloseMessageWindow()
#C0147
ChrTalk(
0xFE,
(
"不,对我来说,大小姐\x01",
"和老爷就是一切……\x02",
)
)
CloseMessageWindow()
#C0148
ChrTalk(
0x105,
"#10309F呵呵,真是一位关怀主人的女仆小姐啊。\x02",
)
CloseMessageWindow()
OP_63(0xFE, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1000)
OP_64(0xFE)
#C0149
ChrTalk(
0xFE,
(
"……那个,艾莉大小姐,\x01",
"前几天收到了先生和小姐\x01",
"寄来的信……\x02",
)
)
CloseMessageWindow()
OP_63(0x0, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x1, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x2, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_63(0x3, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
#C0150
ChrTalk(
0x101,
"#00005F那是……\x02",
)
CloseMessageWindow()
#C0151
ChrTalk(
0x102,
(
"#00103F……嗯,是我分别居住在共和国\x01",
"和帝国的父母。\x02\x03",
"#00100F他们以前也时常来信,\x01",
"自从教团那起事件结束之后,\x01",
"似乎寄得比以前更加频繁了。\x02\x03",
"#00104F信中内容主要都是表达\x01",
"对我和外公的关心,\x01",
"最近都成为我的心灵支柱之一了呢。\x02",
)
)
CloseMessageWindow()
#C0152
ChrTalk(
0x109,
"#10105F这样啊……\x02",
)
CloseMessageWindow()
#C0153
ChrTalk(
0x105,
"#10303F(……亲人……吗……)\x02",
)
CloseMessageWindow()
#C0154
ChrTalk(
0x102,
(
"#00100F乔安娜,我稍后会去看的,\x01",
"请你帮我仔细保管好哦。\x02",
)
)
CloseMessageWindow()
#C0155
ChrTalk(
0xFE,
"是……谨遵吩咐。\x02",
)
CloseMessageWindow()
#C0156
ChrTalk(
0xFE,
(
"那个,听说特别任务支援科\x01",
"已经恢复工作了……\x01",
"请您一定要注意身体。\x02",
)
)
CloseMessageWindow()
#C0157
ChrTalk(
0xFE,
(
"老爷、先生以及小姐……\x01",
"大家全都在挂念着大小姐。\x01",
"我自然也是一样。\x02",
)
)
CloseMessageWindow()
#C0158
ChrTalk(
0x102,
(
"#00109F呵呵,我明白的。\x01",
"谢谢你,乔安娜。\x02",
)
)
CloseMessageWindow()
SetScenarioFlags(0x134, 2)
Jump("loc_2855")
label("loc_27DF")
TurnDirection(0x9, 0x102, 0)
#C0159
ChrTalk(
0xFE,
(
"艾莉大小姐,\x01",
"请一定要注意保重身体。\x02",
)
)
CloseMessageWindow()
#C0160
ChrTalk(
0xFE,
(
"老爷、先生以及小姐……\x01",
"大家全都在挂念着大小姐。\x01",
"我自然也是一样。\x02",
)
)
CloseMessageWindow()
label("loc_2855")
TalkEnd(0xFE)
Return()
# Function_8_16A7 end
def Function_9_2859(): pass
label("Function_9_2859")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x177, 5)), scpexpr(EXPR_END)), "loc_28F6")
TalkBegin(0xFF)
FadeToDark(300, 0, 100)
OP_0D()
SetMessageWindowPos(-1, 30, -1, -1)
SetChrName("")
#A0161
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
scpstr(0x6),
scpstr(0x18),
"#1K要阅读昆西公司的宣传手册吗?\x07\x00\x02",
)
)
Menu(
0,
-1,
-1,
1,
(
"阅读\x01", # 0
"不阅读\x01", # 1
)
)
MenuEnd(0x0)
OP_60(0x0)
OP_57(0x0)
SetMessageWindowPos(14, 280, 60, 3)
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_28E5")
Call(0, 11)
TalkEnd(0xFF)
Jump("loc_28F1")
label("loc_28E5")
FadeToBright(300, 0)
TalkEnd(0xFF)
label("loc_28F1")
Jump("loc_33A4")
label("loc_28F6")
EventBegin(0x0)
FadeToDark(1000, 0, -1)
OP_0D()
OP_68(-41830, 1500, 40450, 0)
MoveCamera(52, 27, 0, 0)
OP_6E(400, 0)
SetCameraDistance(18820, 0)
SetChrPos(0x101, -43440, 60, 40740, 90)
SetChrPos(0x102, -41310, 0, 40520, 90)
SetChrPos(0x103, -42890, 0, 39580, 45)
SetChrPos(0x104, -42860, 60, 41860, 135)
SetChrPos(0x109, -42020, 0, 38900, 0)
SetChrPos(0x105, -41650, 0, 42460, 180)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
FadeToBright(1000, 0)
OP_0D()
#C0162
ChrTalk(
0x102,
"#00105F唔……找到了。\x02",
)
CloseMessageWindow()
Sound(802, 0, 100, 0)
Sleep(400)
OP_93(0x102, 0x10E, 0x1F4)
#C0163
ChrTalk(
0x102,
(
"#00100F这就是昆西公司\x01",
"的宣传手册。\x02",
)
)
CloseMessageWindow()
#C0164
ChrTalk(
0x104,
(
"#00305F嘿……\x01",
"装订得很精美啊。\x02\x03",
"#00300F看起来并不像是\x01",
"普通的资料呢。\x02",
)
)
CloseMessageWindow()
#C0165
ChrTalk(
0x103,
(
"#00200F只有大企业才会\x01",
"在这种细节方面如此讲究。\x02\x03",
"看来这本手册中的内容\x01",
"有很高的可信度。\x02",
)
)
CloseMessageWindow()
#C0166
ChrTalk(
0x102,
"#00109F呵呵,那就好。\x02",
)
CloseMessageWindow()
#C0167
ChrTalk(
0x101,
(
"#00001F好……\x01",
"我们先来粗略浏览一下吧。\x02",
)
)
CloseMessageWindow()
Call(0, 11)
OP_63(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x103, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x104, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x109, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
OP_63(0x105, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x101)
OP_64(0x102)
OP_64(0x103)
OP_64(0x104)
OP_64(0x109)
OP_64(0x105)
#C0168
ChrTalk(
0x105,
(
"#10303F嗯,已经大致看了一遍……\x01",
"但并没找到什么重要资料呢。\x02",
)
)
CloseMessageWindow()
#C0169
ChrTalk(
0x109,
(
"#10105F有没有发现什么与敏涅斯的话\x01",
"有矛盾的内容呢……?\x02",
)
)
CloseMessageWindow()
#C0170
ChrTalk(
0x102,
(
"#00106F唔~这个……\x01",
"在这种资料中果然还是\x01",
"不会有什么收获……\x02",
)
)
CloseMessageWindow()
#C0171
ChrTalk(
0x101,
"#00003F不……我发现矛盾了。\x02",
)
CloseMessageWindow()
OP_63(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x103, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x104, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x109, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(50)
OP_63(0x105, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
def lambda_2CC4():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x103, 1, lambda_2CC4)
Sleep(50)
def lambda_2CD4():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x104, 1, lambda_2CD4)
Sleep(50)
def lambda_2CE4():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x109, 1, lambda_2CE4)
Sleep(50)
def lambda_2CF4():
TurnDirection(0xFE, 0x101, 500)
ExitThread()
QueueWorkItem(0x105, 1, lambda_2CF4)
#C0172
ChrTalk(
0x103,
"#00205F……真的吗?\x02",
)
CloseMessageWindow()
#C0173
ChrTalk(
0x104,
(
"#00309F哈哈,你还是\x01",
"这么靠得住啊。\x02\x03",
"#00300F那就说说吧,到底是什么矛盾?\x02",
)
)
CloseMessageWindow()
#C0174
ChrTalk(
0x101,
(
"#00004F只要仔细回想一下我们昨天\x01",
"在酒店中与敏涅斯的对话,\x01",
"也就不难得出答案了。\x02\x03",
"#00000F敏涅斯随口说出的一句牢骚话……\x01",
"与手册中的内容存在着明显矛盾。\x02\x03",
"那正是敏涅斯\x01",
"并非『昆西公司董事』\x01",
"的证据……\x02",
)
)
CloseMessageWindow()
#C0175
ChrTalk(
0x109,
(
"#10105F这、这本手册中\x01",
"竟然有那么重要的线索……?\x02",
)
)
CloseMessageWindow()
#C0176
ChrTalk(
0x101,
"#00000F嗯,那句话就是——\x02",
)
CloseMessageWindow()
#C0177
ChrTalk(
0x105,
(
"#10304F等一下,\x01",
"暂时还是不要说出来了。\x02",
)
)
CloseMessageWindow()
def lambda_2EAF():
TurnDirection(0xFE, 0x105, 500)
ExitThread()
QueueWorkItem(0x101, 1, lambda_2EAF)
OP_63(0x101, 0x0, 2000, 0x0, 0x1, 0xFA, 0x2)
Sound(29, 0, 100, 0)
Sleep(1000)
#C0178
ChrTalk(
0x101,
"#00005F哎……为什么呢?\x02",
)
CloseMessageWindow()
#C0179
ChrTalk(
0x105,
(
"#10300F呵呵,只有你一个人想到答案,\x01",
"未免让人不甘心。\x02\x03",
"#10309F所以,在揭穿敏涅斯\x01",
"之前暂时保密,就当作\x01",
"留给大家的作业如何?\x02",
)
)
CloseMessageWindow()
#C0180
ChrTalk(
0x101,
(
"#00006F那、那个……\x01",
"这又不是在做游戏……\x02",
)
)
CloseMessageWindow()
#C0181
ChrTalk(
0x103,
(
"#00203F不,我认为瓦吉先生\x01",
"说的很有道理。\x02",
)
)
CloseMessageWindow()
def lambda_2FCF():
TurnDirection(0xFE, 0x103, 500)
ExitThread()
QueueWorkItem(0x101, 1, lambda_2FCF)
Sleep(100)
#C0182
ChrTalk(
0x103,
(
"#00203F罗伊德前辈的想法\x01",
"也存在错误的可能性,\x01",
"如果现在就统一意见,多少有些危险。\x02\x03",
"#00211F而且,每次都\x01",
"被罗伊德前辈比下去,\x01",
"实在是让人有些不爽。\x02",
)
)
CloseMessageWindow()
OP_63(0x101, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sound(23, 0, 100, 0)
Sleep(1000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x20, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_314C")
RunExpression(0x0, (scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
FadeToDark(300, 0, 100)
OP_0D()
SetMessageWindowPos(-1, 30, -1, -1)
SetChrName("")
#A0183
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
scpstr(0x6),
scpstr(0x18),
"◆IBC事件(测试用)\x07\x00\x02",
)
)
Menu(
0,
-1,
-1,
0,
(
"【不做变更】\x01", # 0
"【已调查】\x01", # 1
"【未调查】\x01", # 2
)
)
MenuEnd(0x0)
OP_60(0x0)
OP_57(0x0)
SetMessageWindowPos(14, 280, 60, 3)
FadeToBright(300, 0)
OP_0D()
Switch(
(scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_END)),
(0, "loc_3137"),
(1, "loc_313C"),
(2, "loc_3144"),
(SWITCH_DEFAULT, "loc_314C"),
)
label("loc_3137")
Jump("loc_314C")
label("loc_313C")
SetScenarioFlags(0x177, 4)
Jump("loc_314C")
label("loc_3144")
ClearScenarioFlags(0x177, 4)
Jump("loc_314C")
label("loc_314C")
OP_29(0x87, 0x1, 0x3)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x177, 4)), scpexpr(EXPR_END)), "loc_327F")
#C0184
ChrTalk(
0x101,
(
"#00006F(后面那些才是她的真心话吧……)\x02\x03",
"#00001F我、我明白了。\x01",
"既然如此,这个问题\x01",
"就留给大家继续思考……\x02\x03",
"至于这本资料中的要点部分,\x01",
"最好记录在调查手册中。\x02\x03",
"#00003F好……我们已经收集到\x01",
"不少可以证明敏涅斯\x01",
"行事可疑的证据了。\x02\x03",
"#00000F先回哈罗德\x01",
"先生家吧。\x02",
)
)
CloseMessageWindow()
#C0185
ChrTalk(
0x102,
"#00100F嗯,走吧。\x02",
)
CloseMessageWindow()
OP_29(0x87, 0x1, 0x4)
Jump("loc_336B")
label("loc_327F")
#C0186
ChrTalk(
0x101,
(
"#00006F(后面那些才是她的真心话吧……)\x02\x03",
"#00001F我、我明白了。\x01",
"既然如此,这个问题\x01",
"就留给大家继续思考……\x02\x03",
"至于这本资料中的要点部分,\x01",
"最好记录在调查手册中。\x02\x03",
"#00003F……接下来还要去IBC调查,\x01",
"尽快行动吧。\x02",
)
)
CloseMessageWindow()
#C0187
ChrTalk(
0x102,
"#00100F嗯,明白了。\x02",
)
CloseMessageWindow()
label("loc_336B")
FadeToDark(1000, 0, -1)
OP_0D()
SetScenarioFlags(0x177, 5)
SetChrPos(0x0, -43000, 60, 40720, 90)
OP_69(0xFF, 0x0)
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
EventEnd(0x5)
label("loc_33A4")
Return()
# Function_9_2859 end
def Function_10_33A5(): pass
label("Function_10_33A5")
EventBegin(0x0)
Fade(500)
OP_68(-45700, 1560, 2610, 0)
MoveCamera(38, 32, 0, 0)
OP_6E(340, 0)
SetCameraDistance(23160, 0)
SetChrPos(0x101, -44780, 60, 2500, 0)
SetChrPos(0x102, -45960, 0, 2500, 0)
SetChrPos(0x103, -46730, 0, 1660, 0)
SetChrPos(0x104, -44030, 0, 1620, 0)
SetChrPos(0x105, -45860, 0, 980, 0)
SetChrPos(0x109, -44820, 0, 940, 0)
ClearChrFlags(0x4, 0x80)
ClearChrBattleFlags(0x4, 0x8000)
ClearChrFlags(0x5, 0x80)
ClearChrBattleFlags(0x5, 0x8000)
OP_4B(0x9, 0xFF)
OP_93(0x9, 0xB4, 0x0)
OP_0D()
#C0188
ChrTalk(
0x9,
(
"啊……\x01",
"艾莉大小姐,各位……\x02",
)
)
CloseMessageWindow()
#C0189
ChrTalk(
0x105,
(
"#10300F(职业女性选秀活动中的『女仆』……\x01",
" 邀请她来担当如何?)\x02",
)
)
CloseMessageWindow()
#C0190
ChrTalk(
0x101,
(
"#00003F(是啊……\x01",
" 这主意不错。)\x02\x03",
"#00000F(艾莉,\x01",
" 你去问问她可以吗?)\x02",
)
)
CloseMessageWindow()
#C0191
ChrTalk(
0x102,
(
"#00102F(明白了,\x01",
" 不过我觉得很难成功……)\x02\x03",
"#00100F那个,乔安娜,\x01",
"有件事情想请你帮忙……\x02",
)
)
CloseMessageWindow()
#C0192
ChrTalk(
0x9,
(
"好的……\x01",
"只要是艾莉大小姐的请求,\x01",
"无论要我做什么都可以。\x02",
)
)
CloseMessageWindow()
SetChrName("")
#A0193
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"艾莉邀请乔安娜参加\x01",
"慈善宴会中的职业女性选秀活动。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
OP_5A()
#C0194
ChrTalk(
0x9,
(
"……啊……\x01",
"选、选秀………………\x02",
)
)
CloseMessageWindow()
OP_63(0x9, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(2000)
OP_64(0x9)
OP_82(0x64, 0x0, 0xBB8, 0x12C)
#C0195
ChrTalk(
0x9,
"#4S……咦咦咦咦咦咦咦!?\x02",
)
CloseMessageWindow()
#C0196
ChrTalk(
0x103,
"#00205F……好像很吃惊呢。\x02",
)
CloseMessageWindow()
#C0197
ChrTalk(
0x9,
(
"唔、唔唔,我不行的……\x01",
"我怎么能参加什么职业女性选秀……\x02",
)
)
CloseMessageWindow()
#C0198
ChrTalk(
0x104,
(
"#00302F不不不,绝对没问题的,\x01",
"大哥哥我可以向你保证。\x02",
)
)
CloseMessageWindow()
#C0199
ChrTalk(
0x101,
"#00006F你拿什么保证啊……\x02",
)
CloseMessageWindow()
#C0200
ChrTalk(
0x102,
(
"#00105F那、那个,乔安娜,\x01",
"不用太介意哦。\x02\x03",
"#00103F我们再去找找,\x01",
"应该会有其他的\x01",
"女仆小姐愿意参加……\x02",
)
)
CloseMessageWindow()
OP_63(0x9, 0x0, 2000, 0x2, 0x7, 0x50, 0x1)
Sound(28, 0, 100, 0)
Sleep(1000)
#C0201
ChrTalk(
0x9,
"………………………………\x02",
)
CloseMessageWindow()
#C0202
ChrTalk(
0x9,
(
"……那个,我…………\x01",
"还是让我参加吧。\x02",
)
)
CloseMessageWindow()
#C0203
ChrTalk(
0x109,
(
"#10105F主、主意变得好快啊。\x01",
"这虽然再好不过,可是你……\x02",
)
)
CloseMessageWindow()
#C0204
ChrTalk(
0x9,
(
"因、因为我才是……\x01",
"……大小姐的女仆…………\x02",
)
)
CloseMessageWindow()
#C0205
ChrTalk(
0x102,
(
"#00102F呵呵,谢谢啦,乔安娜。\x01",
"不过没必要勉强自己哦。\x02",
)
)
CloseMessageWindow()
#C0206
ChrTalk(
0x9,
(
"……活动开始前请通知我吧,\x01",
"我会立刻赶过去的…………\x02",
)
)
CloseMessageWindow()
#C0207
ChrTalk(
0x101,
"#00000F嗯,拜托你了。\x02",
)
CloseMessageWindow()
OP_29(0x8F, 0x1, 0x3)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x199, 4)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x199, 5)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x199, 7)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_3956")
#C0208
ChrTalk(
0x101,
(
"#00003F好,我们总算\x01",
"把参选者找齐了。\x02\x03",
"#00000F这就去市民会馆,\x01",
"向洛依先生他们报告吧。\x02",
)
)
CloseMessageWindow()
OP_29(0x8F, 0x1, 0x5)
label("loc_3956")
SetScenarioFlags(0x199, 6)
OP_4C(0x9, 0xFF)
OP_93(0x9, 0x0, 0x0)
OP_69(0xFF, 0x0)
SetChrPos(0x0, -45350, 60, 2400, 180)
SetChrFlags(0x4, 0x80)
SetChrBattleFlags(0x4, 0x8000)
SetChrFlags(0x5, 0x80)
SetChrBattleFlags(0x5, 0x8000)
EventEnd(0x5)
Return()
# Function_10_33A5 end
def Function_11_3990(): pass
label("Function_11_3990")
FadeToDark(300, 0, 100)
OP_0D()
SetMessageWindowPos(-1, -1, -1, -1)
Sound(18, 0, 100, 0)
Sleep(300)
SetChrName("")
#A0209
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"#3S……本公司身为糕点制造业界的领头羊,\x01",
"为了糕点制造业的未来,始终在不断钻研。\x01",
"本手册将会为您展示\x01",
"本公司的部分方面。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
Sound(18, 0, 100, 0)
SetChrName("")
#A0210
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"#3S对于糕点而言,\x01",
"最重要的就是\x01",
"能否让食用者感到『美味』。\x01",
"为此,本公司在\x01",
"『提高糕点的品质』\x01",
"这一点上绝对不会妥协。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
Sound(18, 0, 100, 0)
SetChrName("")
#A0211
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"#3S糕点制造工厂中配备了最新型设备,\x01",
"卫生方面也采取了最完善的处理措施,\x01",
"这些基本条件自不必说。\x01",
"至于糕点原材料的品质与产地,\x01",
"本公司也有着严格的要求。\x01",
"此外,关于商品开发这一过程,\x01",
"本公司也制定了严谨的步骤与基准。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
Sound(18, 0, 100, 0)
SetChrName("")
#A0212
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"#3S董事要亲自试吃开发中的商品,\x01",
"以便判断其上市销售的可行性。\x01",
"之后,还要经过多次企划会议讨论,\x01",
"才会正式投入到生产线。\x01",
"这都是为了能给顾客献上\x01",
"最美味的糕点,给顾客最好的享受,\x01",
"而从公司初创时便一直继承下来的传统。\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
Sound(18, 0, 100, 0)
SetChrName("")
#A0213
AnonymousTalk(
0xFF,
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"#3S昆西公司正是因为长期\x01",
"给顾客提供高品质的糕点,\x01",
"才能获得如今的成就……\x07\x00\x02",
)
)
CloseMessageWindow()
OP_57(0x0)
Sound(18, 0, 100, 0)
FadeToBright(300, 0)
OP_5A()
SetMessageWindowPos(14, 280, 60, 3)
Return()
# Function_11_3990 end
SaveToFile()
Try(main)
# ---------------------------------------------------------------------------
# File: /NinjaGold/settings.py  (repo: SDBranka/NinjaGold)
# ---------------------------------------------------------------------------
"""
Django settings for NinjaGold project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '48s9*0q%(s79!70c9!^vujzz0iy))40u)ikr66k=9x7y^d*pcs'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'NG_app',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'NinjaGold.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'NinjaGold.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# ---------------------------------------------------------------------------
# File: /paleomix/tools/zonkey/common.py  (repo: fvangef/paleomix)
# ---------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Mikkel Schubert <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import collections
import paleomix.yaml
import paleomix.common.versions as versions
# Format number for database file; is incremented when the format is changed.
# The 'revision' field specifies updates to the table that do not change the
# format of the database (see below).
_SUPPORTED_DB_FORMAT = 1
RSCRIPT_VERSION = versions.Requirement(call=("Rscript", "--version"),
                                       search=r"version (\d+)\.(\d+)\.(\d+)",
checks=versions.GE(3, 0, 0),
priority=10)
class DBFileError(RuntimeError):
pass
def get_sample_names(handle):
samples = []
for readgroup in handle.header.get("RG", ()):
if "SM" in readgroup:
samples.append(readgroup["SM"])
return frozenset(samples)
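# Example (a sketch): for a pysam.AlignmentFile whose header carries read
# groups such as {"RG": [{"ID": "rg1", "SM": "sampleA"}]}, get_sample_names
# returns frozenset({"sampleA"}); read groups without an "SM" tag are skipped.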
def contig_name_to_plink_name(chrom):
"""Converts chromosome / contig name to the values expected by 'plink',
namely a digit or X/Y, or returns None if the chromosome could not be
identified.
"""
if chrom.isdigit():
        return chrom.upper()
elif chrom.upper() in "XY":
return chrom.upper()
elif chrom.lower().startswith("chr") and chrom[3:].isdigit():
return chrom[3:]
elif chrom.lower() in ("chrx", "chry"):
return chrom[3].upper()
else:
return None
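# Illustrative inputs/outputs, derived from the branches above:
#   contig_name_to_plink_name("1")          -> "1"
#   contig_name_to_plink_name("chr12")      -> "12"
#   contig_name_to_plink_name("chrX")       -> "X"
#   contig_name_to_plink_name("scaffold_1") -> None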
def read_summary(filename, default="[MISSING VALUE!]"):
results = collections.defaultdict(lambda: default)
with open(filename) as makefile:
string = makefile.read()
data = paleomix.yaml.safe_load(string)
if not isinstance(data, dict):
raise DBFileError('Summary file does not contain dictionary')
results.update(data)
return results
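# Example (a sketch): for a YAML summary file containing "Revision: 3",
#   read_summary(path)["Revision"] -> 3
#   read_summary(path)["Anything missing"] -> "[MISSING VALUE!]" (the default)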
# ---------------------------------------------------------------------------
# File: /Downloads/adafruit-circuitpython-bundle-py-20201107/lib/adafruit_pybadger/pybadge.py
# (repo: aferlazzo/messageBoard)
# ---------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2020 Kattni Rembor for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_pybadger.pybadge`
================================================================================
Badge-focused CircuitPython helper library for PyBadge, PyBadge LC and EdgeBadge.
All three boards are included in this module as there is no difference in the
CircuitPython builds at this time, and therefore no way to differentiate
the boards from within CircuitPython.
* Author(s): Kattni Rembor
Implementation Notes
--------------------
**Hardware:**
* `Adafruit PyBadge <https://www.adafruit.com/product/4200>`_
* `Adafruit PyBadge LC <https://www.adafruit.com/product/3939>`_
* `Adafruit EdgeBadge <https://www.adafruit.com/product/4400>`_
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
"""
from collections import namedtuple
import board
import digitalio
import analogio
import audioio
from gamepadshift import GamePadShift
import adafruit_lis3dh
import neopixel
from adafruit_pybadger.pybadger_base import PyBadgerBase
__version__ = "3.1.1"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_PyBadger.git"
Buttons = namedtuple("Buttons", "b a start select right down up left")
class PyBadge(PyBadgerBase):
"""Class that represents a single PyBadge, PyBadge LC, or EdgeBadge."""
_audio_out = audioio.AudioOut
_neopixel_count = 5
def __init__(self):
super().__init__()
i2c = None
if i2c is None:
try:
i2c = board.I2C()
except RuntimeError:
self._accelerometer = None
if i2c is not None:
int1 = digitalio.DigitalInOut(board.ACCELEROMETER_INTERRUPT)
try:
self._accelerometer = adafruit_lis3dh.LIS3DH_I2C(
i2c, address=0x19, int1=int1
)
except ValueError:
self._accelerometer = adafruit_lis3dh.LIS3DH_I2C(i2c, int1=int1)
# NeoPixels
self._neopixels = neopixel.NeoPixel(
board.NEOPIXEL, self._neopixel_count, brightness=1, pixel_order=neopixel.GRB
)
self._buttons = GamePadShift(
digitalio.DigitalInOut(board.BUTTON_CLOCK),
digitalio.DigitalInOut(board.BUTTON_OUT),
digitalio.DigitalInOut(board.BUTTON_LATCH),
)
self._light_sensor = analogio.AnalogIn(board.A7)
@property
def button(self):
"""The buttons on the board.
Example use:
.. code-block:: python
from adafruit_pybadger import pybadger
while True:
if pybadger.button.a:
print("Button A")
elif pybadger.button.b:
print("Button B")
elif pybadger.button.start:
print("Button start")
elif pybadger.button.select:
print("Button select")
"""
button_values = self._buttons.get_pressed()
return Buttons(
*[
button_values & button
for button in (
PyBadgerBase.BUTTON_B,
PyBadgerBase.BUTTON_A,
PyBadgerBase.BUTTON_START,
PyBadgerBase.BUTTON_SELECT,
PyBadgerBase.BUTTON_RIGHT,
PyBadgerBase.BUTTON_DOWN,
PyBadgerBase.BUTTON_UP,
PyBadgerBase.BUTTON_LEFT,
)
]
)
pybadge = PyBadge() # pylint: disable=invalid-name
"""Object that is automatically created on import."""
# ---------------------------------------------------------------------------
# File: /insta/migrations/0008_image_profile.py  (repo: niklauspeter/instagram, MIT)
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-05-23 06:56
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('insta', '0007_remove_image_profile_photo'),
]
operations = [
migrations.AddField(
model_name='image',
name='profile',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='insta.Profile'),
),
]
# ---------------------------------------------------------------------------
# File: /dht_temperature.py  (repo: BurntTech/homie4, MIT)
# ---------------------------------------------------------------------------
# Raspberry PI
import Adafruit_DHT
import time
from homie.device_temperature import Device_Temperature
mqtt_settings = {
'MQTT_BROKER' : 'OpenHAB',
'MQTT_PORT' : 1883,
}
try:
temperature_device = Device_Temperature(device_id="temperature-sensor-1",name = "Temperature_Sensor 1",mqtt_settings=mqtt_settings)
sensor = Adafruit_DHT.AM2302
pin = 4
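    # AM2302 (DHT22-family) sensor wired to BCM GPIO 4; read_retry keeps
    # polling the sensor until it returns a reading, so an iteration may
    # block briefly before the temperature is published. Note the humidity
    # value is read but never published by this script.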
while True:
humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
print(temperature)
temperature_device.update_temperature(temperature)
time.sleep(5)
except (KeyboardInterrupt, SystemExit):
print("Quitting.") | [
"[email protected]"
] | |
73d12c61155fbb679cf6f632c756bc0889002274 | c2f92d75d235ff5ed7b213c02c4a0657545ba02f | /newchama_web/2/newchama/tools/test_mq1.py | dd2c58bbbefb696a43f1e8523ee83b7da1bbcca3 | [] | no_license | cash2one/tstpthon | fab6112691eb15a8a26bd168af3f179913e0c4e0 | fc5c42c024065c7b42bea2b9de1e3874a794a30d | refs/heads/master | 2021-01-20T01:52:06.519021 | 2017-04-14T09:50:55 | 2017-04-14T09:50:55 | 89,338,193 | 0 | 1 | null | 2017-04-25T08:46:06 | 2017-04-25T08:46:06 | null | UTF-8 | Python | false | false | 847 | py | #encoding:utf-8
import os,sys
sys.path.append(os.path.abspath('../'))
sys.path.append(os.path.abspath('/var/www/newchama'))
import newchama.settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "newchama.settings")
import pika
import pickle
from django.template import loader, Context
connection = pika.BlockingConnection(pika.ConnectionParameters(
'localhost'))
channel = connection.channel()
channel.queue_declare(queue='email')
email="[email protected]"
mail_dic = dict()
mail_dic['email'] = email
mail_dic['name'] = 'richard'
html_content = loader.render_to_string('tools/update_mail.html', mail_dic)
c={}
c['title']=u'NewChama用户通知'
c['email']=email
c['content']=html_content
channel.basic_publish(exchange='', routing_key='email', body=pickle.dumps(c))
print " [x] Sent 'Hello World!'"
connection.close()

# ---------------------------------------------------------------------------
# File: /Python_codes/p03473/s440381272.py  (repo: Aasthaengg/IBMdataset)
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
#----------
M = int(input().strip())
#----------
print(24+(24-M))
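# Prints 24 + (24 - M) = 48 - M: the hours remaining from M o'clock today
# until the end of tomorrow, which is the quantity this AtCoder task asks
# for, going by the formula.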
# ---------------------------------------------------------------------------
# File: /PI_code/simulator/behaviourGeneration/firstGenScripts_preyHunter/behav372.py
# (repo: s0217391/DifferentProjects)
# ---------------------------------------------------------------------------
#!/usr/bin/python
import sys
def compute(prey):
temp0 = min(prey[1], prey[0])
temp1 = -1 * prey[0]
if temp0 != 0:
temp1 = prey[0] / temp0
else:
temp1 = temp0
temp0 = temp0 - prey[1]
if temp1 > temp0:
if prey[0] > prey[1]:
if prey[1] != 0:
temp0 = temp0 % prey[1]
else:
temp0 = prey[1]
else:
temp0 = max(prey[1], temp0)
else:
temp0 = prey[0] * prey[1]
temp0 = temp1 + prey[1]
temp1 = -1 * temp1
temp0 = min(prey[1], prey[0])
temp0 = max(prey[1], prey[1])
temp0 = temp0 + temp0
return [temp0, temp0]
# ---------------------------------------------------------------------------
# File: /词条导出/zhidao_cron.py  (repo: wangdexinpython/test)
# ---------------------------------------------------------------------------
#coding=utf-8
import pymongo,time,requests,json
import urllib.parse
import redis,pexpect,os
class zhidao(object):
    def __init__(self):
        self.mongo = self.mongodb()
        self.mon_app = self.app_mongo()

    def mongodb(self):
        mongo = pymongo.MongoClient(
            "mongodb://xhql:" + urllib.parse.quote_plus("xhql_190228_snv738J72*fjVNv8220aiVK9V820@_")+"@172.26.26.132:20388/webpage")['webpage']
        return mongo

    def app_mongo(self):
        mon = pymongo.MongoClient("mongodb://integrate:" + urllib.parse.quote_plus(
            "integ_190228_snv738v8220aiVK9V820@_eate") + "@172.26.26.132:20388/integrate")
        return mon
    def Baike(self):
        webnum = self.mongo.zhidao_details.find({'state_qiu':0,'source':'baiduzhidao'}).count()
print(webnum)
if webnum>0:
filetime = time.strftime("%Y%m%d", time.localtime())
filename = 'inc_zhidao_{}.dat'.format(filetime)
# filename = 'inc_zhidao_20190527.dat'
f = open(r'/mnt/data/liqiu/zhidao/{}'.format(filename),'a',encoding='utf-8')
for i in range(0,webnum,10000):
print('*****************************************',i)
# filetime = time.strftime("%Y%m%d_%H%M%S", time.localtime())
# filename = 'full_{}.dat'.format(filetime)
# f = open(r'/mnt/data/liqiu/{}'.format(filename),'a',encoding='utf-8')
            zds = self.mongo.zhidao_details.find({'state_qiu':0,'source':'baiduzhidao'}).limit(10000).skip(i)
for one in zds:
try:
liqiu_dict = {'id':str(one['id']),'link':str(one['id']),'title':str(one['title']),'author':str(one['author']),'content':str(one['content_np']),'site_name':str(one['site_name']),'article_url':str(one['article_url']),'crawl_time':str(one['crawl_time']),'source':str(one['source']),'topic':'','flag':'0'}
if one.get('type',[]) and isinstance(one['type'],list):
liqiu_dict['type']=' '.join(one['type'])
elif one.get('type','') and isinstance(one['type'],str):
liqiu_dict['type']= one['type']
else:
liqiu_dict['type']=''
if one.get('label',[]) and isinstance(one['label'],list):
liqiu_dict['label']=' '.join(one['label'])
elif one.get('label',"") and isinstance(one['label'],str):
liqiu_dict['label']= one['label']
else:
liqiu_dict['label']=''
# if len(liqiu_dict)==0:
# continue
cons = liqiu_dict['content']
url = 'http://172.26.26.135:8995/topic?content={}'.format(cons)
ai = requests.get(url).text
print(ai)
if ai == 'AI':
ai = 'ai'
else:
ai = ''
liqiu_dict['topic'] = ai
                    self.read_dat(liqiu_dict)
f.write('{}\n'.format(json.dumps(liqiu_dict,ensure_ascii=False)))
s1={'id':one['id']}
s2 = {'$set':{'state_qiu':1}}
                    self.mongo.zhidao_details.update(s1,s2)
                except KeyError as e:
                    print('exception')
                    print('---------------------------',e)
# continue
# f.write('{}\n'.format(json.dumps(liqiu_dict,ensure_ascii=False)))
    def read_dat(self, line):
if line['topic'] == 'ai':
dict_1 = {'id': line['id'], 'content': line['content'], 'crawl_time': line['crawl_time'],
'title': line['title'], 'source': line['source'], 'topic': line['topic'], 'type': line['type'],
'url': line['article_url']}
try:
dict_1['label'] = line['label']
except:
dict_1['label'] = ''
# print(dict_1)
            self.mon_app.integrate.data_dat.update({'id': dict_1['id']}, dict_1, True)
    def copy_data(self):
fileti = time.strftime("%H%M%S", time.localtime())
if int(fileti) > 230000:
            # check whether the file is empty
filetime = time.strftime("%Y%m%d", time.localtime())
filename = 'inc_zhidao_{}.dat'.format(filetime)
file2 = '/mnt/data/liqiu/zhidao/{}'.format(filename)
if os.path.getsize('{}'.format(file2)):
                # scp the finished file to the designated directory
cmd = "scp -r {} [email protected]:/home/search/ytt/search1/raw_data/src_data/".format(file2)
pexpect.run(cmd)
else:
pass
    def run(self):
        self.Baike()
        self.copy_data()
if __name__ == '__main__':
zhi=zhidao()
zhi.run()
# ---------------------------------------------------------------------------
# File: /knowtator2standoff.py  (repo: spyysalo/knowtator2standoff)
# ---------------------------------------------------------------------------
#!/usr/bin/env python
from __future__ import print_function
import sys
import re
import io
import xml.etree.cElementTree as ET
from collections import defaultdict
from os import path
from logging import warn
DOCDIR = 'craft-2.0/articles/txt/' # TODO: CLI argument
MAX_SPAN = 150 # TODO: CLI argument
# Mapping from CRAFT to standoff types
type_map = {
'Entrez Gene sequence': 'EntrezGene',
'organism': 'NCBITaxon',
}
class FormatError(Exception):
pass
class Annotation(object):
"""Knowtator annotation."""
def __init__(self, spans, texts, mention_id):
self.spans = spans
self.texts = texts
self.mention_id = mention_id
self.mention = None
@property
def type(self):
if self.mention is None:
raise ValueError('no mention (call map_mentions?)')
type_ = self.mention.type
type_ = type_map.get(type_, type_)
return type_
def to_standoff(self):
t_id = next_free_id('T')
type_ = self.type.replace(' ', '_')
spans = ';'.join('{} {}'.format(s, e) for s, e in self.spans)
texts = u' '.join(self.texts)
t_ann = u'{}\t{} {}\t{}'.format(t_id, type_, spans, texts)
return [t_ann]
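        # Illustrative example of an emitted line (hypothetical IDs/offsets):
        #   'T1\tEntrezGene 0 5;12 16\tBRCA1 gene'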
@classmethod
def from_xml(cls, e, doc_text):
"""Return Annotation from ElementTree element."""
spans = []
for s in e.findall('span'):
spans.append((int(s.get('start')), int(s.get('end'))))
if not spans:
raise FormatError('no spans for annotation')
text = findonly(e, 'spannedText').text
if not text:
raise FormatError('no text for annotation')
texts = text.split(' ... ') # Resolve discontinuous annotations
if len(spans) != len(texts):
            raise FormatError('{} spans, {} texts'.format(len(spans),
                                                          len(texts)))
fixed_spans = []
for span, text in zip(spans, texts):
start, end = span
if len(text) != end-start:
msg = 'text length mismatch: "{}" ({}) vs {}-{} ({})'.format(
text.encode('utf-8'), len(text), start, end, end-start)
if doc_text[start:start+len(text)] == text:
msg += ' (adjust end: "{}" to "{}")'.format(
doc_text[start:end].encode('utf-8'),
doc_text[start:start+len(text)].encode('utf-8'))
end = start+len(text)
elif doc_text[end-len(text):end] == text:
msg += ' (adjust start: "{}" to "{}")'.format(
doc_text[start:end].encode('utf-8'),
doc_text[end-len(text):end].encode('utf-8'))
start = end-len(text)
else:
msg += ' (failed to adjust)'
warn(msg)
if len(text) != end-start:
raise FormatError(
'Text mismatch: "{}" (len {}) vs "{}" ({}-{})'.format(
text.encode('utf-8'), len(text),
doc_text[start:end].encode('utf-8'), start, end))
fixed_spans.append((start, end))
spans = fixed_spans
# sanity check
if spans[-1][1] - spans[0][0] > MAX_SPAN:
raise FormatError('span length over MAX_SPAN: {} ({})'.format(
text.encode('utf-8'), spans))
mention_id = findonly(e, 'mention').get('id')
return cls(spans, texts, mention_id)
def findonly(e, tag):
"""Return only child of ElementTree element with given tag."""
found = e.findall(tag)
if len(found) != 1:
raise FormatError('expected single {} child, found {}'.format(
tag, len(found)))
return found[0]
def next_free_id(prefix):
idx = next_free_id.id_map[prefix] + 1
next_free_id.id_map[prefix] += 1
return prefix + str(idx)
next_free_id.id_map = defaultdict(int)
def reset_next_free_id():
next_free_id.id_map = defaultdict(int)
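# Illustrative behavior (not in the original source): next_free_id('T')
# returns 'T1', then 'T2', and so on; reset_next_free_id() restarts the
# numbering, which main() relies on when processing each document.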
class ClassMention(object):
"""Knowtator ClassMention."""
def __init__(self, id_, class_id, text):
self.id = id_
self.class_id = class_id
self.text = text
@property
def type(self):
m = re.match(r'^([a-zA-Z]+):(\d+)$', self.class_id)
if m:
return m.group(1) # TYPE:NUM ontology ID
else:
return self.class_id
@classmethod
def from_xml(cls, e):
"""Return ClassMention from ElementTree element."""
id_ = e.get('id')
c = findonly(e, 'mentionClass')
class_id = c.get('id')
text = c.text
return cls(id_, class_id, text)
class StringSlotMention(object):
"""Knowtator StringSlotMention."""
def __init__(self, id_, slot, value):
self.id = id_
self.slot = slot
self.value = value
@property
def type(self):
return self.slot
@classmethod
def from_xml(cls, e):
"""Return StringSlotMention from ElementTree element."""
id_ = e.get('id')
slot = findonly(e, 'mentionSlot').get('id')
value = findonly(e, 'stringSlotMentionValue').get('value')
return cls(id_, slot, value)
class IntegerSlotMention(object):
"""Knowtator IntegerSlotMention."""
def __init__(self, id_, slot, value):
self.id = id_
self.slot = slot
self.value = value
@property
def type(self):
return self.slot
@classmethod
def from_xml(cls, e):
"""Return IntegerSlotMention from ElementTree element."""
id_ = e.get('id')
slot = findonly(e, 'mentionSlot').get('id')
value = findonly(e, 'integerSlotMentionValue').get('value')
return cls(id_, slot, value)
class BooleanSlotMention(object):
"""Knowtator BooleanSlotMention."""
def __init__(self, id_, slot, value):
self.id = id_
self.slot = slot
self.value = value
@property
def type(self):
return self.slot
@classmethod
def from_xml(cls, e):
"""Return BooleanSlotMention from ElementTree element."""
id_ = e.get('id')
slot = findonly(e, 'mentionSlot').get('id')
value = findonly(e, 'booleanSlotMentionValue').get('value')
return cls(id_, slot, value)
class ComplexSlotMention(object):
"""Knowtator ComplexSlotMention."""
def __init__(self, id_, slot, values):
self.id = id_
self.slot = slot
self.values = values
@property
def type(self):
return self.slot
@classmethod
def from_xml(cls, e):
"""Return ComplexSlotMention from ElementTree element."""
id_ = e.get('id')
slot = findonly(e, 'mentionSlot').get('id')
values = [c.get('value') for c in e.findall('complexSlotMentionValue')]
return cls(id_, slot, values)
def get_text(docid, docdir=DOCDIR, encoding='utf-8'):
if docid not in get_text.text_by_docid:
fn = path.join(docdir, docid)
with io.open(fn, encoding=encoding) as f:
text = f.read()
get_text.text_by_docid[docid] = text
return get_text.text_by_docid[docid]
get_text.text_by_docid = {}
_mention_class_map = {
'classMention': ClassMention,
'complexSlotMention': ComplexSlotMention,
'integerSlotMention': IntegerSlotMention,
'booleanSlotMention': BooleanSlotMention,
'stringSlotMention': StringSlotMention,
}
def load_knowtator_xml(fn):
tree = ET.parse(fn)
root = tree.getroot()
docid = root.get('textSource')
text = get_text(docid)
annotations = []
mentions = []
for e in root:
try:
if e.tag == 'annotation':
annotations.append(Annotation.from_xml(e, text))
elif e.tag in _mention_class_map:
mentions.append(_mention_class_map[e.tag].from_xml(e))
else:
raise FormatError('unexpected element {}'.format(e.tag))
        except FormatError as error:
print('error parsing {}: {} ({})'.format(fn, error, e),
file=sys.stderr)
return docid, annotations, mentions
def map_mentions(annotations, mentions):
mention_by_id = {}
for m in mentions:
        assert m.id not in mention_by_id, 'duplicate id: {}'.format(m.id)
mention_by_id[m.id] = m
for a in annotations:
a.mention = mention_by_id[a.mention_id]
def main(argv):
if len(argv) < 2:
print('Usage: {} [FILE [...]]'.format(__file__))
return 1
annotations_by_id = defaultdict(list)
for fn in argv[1:]:
try:
docid, annotations, mentions = load_knowtator_xml(fn)
map_mentions(annotations, mentions)
annotations_by_id[docid].extend(annotations)
        except Exception as e:
print('failed to parse {}: {}'.format(fn, e), file=sys.stderr)
    for docid, annotations in annotations_by_id.items():
reset_next_free_id()
for a in annotations:
for t in a.to_standoff():
print(t.encode('utf-8'))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| [
"[email protected]"
] | |
c69c64d15e9879c0c3e8bb12dc4086d660d80025 | 601443d21d3c9f9121e6aec76e2ad012ec4a7817 | /arekit/contrib/utils/pipelines/text_opinion/filters/distance_based.py | ba536accb1dcf557dfb01ce0bdd5f75bd5db3952 | [
"MIT"
] | permissive | nicolay-r/AREkit | 27421472ca296671a6da69a94c1070a0b5a33451 | 1e1d354654f4f0a72090504663cc6d218f6aaf4a | refs/heads/master | 2023-08-29T13:30:26.511617 | 2023-08-13T20:11:43 | 2023-08-13T20:11:43 | 225,708,027 | 54 | 4 | MIT | 2023-01-18T13:17:01 | 2019-12-03T20:20:46 | Python | UTF-8 | Python | false | false | 650 | py | from arekit.common.data.input.sample import InputSampleBase
from arekit.contrib.utils.pipelines.text_opinion.filters.base import TextOpinionFilter
class DistanceLimitedTextOpinionFilter(TextOpinionFilter):
def __init__(self, terms_per_context):
super(DistanceLimitedTextOpinionFilter, self).__init__()
self.__terms_per_context = terms_per_context
def filter(self, text_opinion, parsed_news, entity_service_provider):
return InputSampleBase.check_ability_to_create_sample(
entity_service=entity_service_provider,
text_opinion=text_opinion,
window_size=self.__terms_per_context)
| [
"[email protected]"
] | |
81d2d43d971b207b2dd0bcc44c97e8f6c0f921da | 7f04fbc897ff52e4d27cc2f27ae6dfbabe43dfe0 | /cellml/pmr2/tests/base.py | 9100a7a3ffe800da9cdfd74b377716fd6c6545ab | [] | no_license | metatoaster/cellml.pmr2 | daae69721af04f7a28eae496dcbeb13b98e2d4d0 | cbfe212effe325350b1e7087e6172952483b981f | refs/heads/master | 2021-06-21T20:56:30.267128 | 2016-03-08T05:45:53 | 2016-03-08T05:45:53 | 2,396,487 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,105 | py | import unittest
import doctest
from zope.component import testing
from Testing import ZopeTestCase as ztc
from Products.PloneTestCase import PloneTestCase as ptc
from Products.PloneTestCase.layer import onsetup
from Products.PloneTestCase.layer import onteardown
from Products.Five import fiveconfigure
from Zope2.App import zcml
import pmr2.app
from pmr2.testing.base import TestRequest
from pmr2.app.exposure.content import ExposureContainer
from pmr2.app.exposure.browser.browser import ExposureAddForm
from pmr2.app.exposure.browser.browser import ExposureFileGenForm
from pmr2.app.exposure.tests.base import ExposureDocTestCase
from pmr2.app.exposure.tests.base import ExposureExtendedDocTestCase
@onsetup
def setup():
import pmr2.app
import cellml.pmr2
fiveconfigure.debug_mode = True
    # XXX dependent on pmr2.app still
zcml.load_config('configure.zcml', cellml.pmr2)
zcml.load_config('test.zcml', cellml.pmr2.tests)
fiveconfigure.debug_mode = False
ztc.installPackage('cellml.pmr2')
@onteardown
def teardown():
pass
setup()
teardown()
ptc.setupPloneSite(products=('cellml.pmr2',))
class CellMLDocTestCase(ExposureExtendedDocTestCase):
def setUp(self):
super(CellMLDocTestCase, self).setUp()
import cellml.pmr2
rev = u'2'
request = TestRequest(
form={
'form.widgets.workspace': u'rdfmodel',
'form.widgets.commit_id': rev,
'form.buttons.add': 1,
})
testform = ExposureAddForm(self.portal.exposure, request)
testform.update()
exp_id = testform._data['id']
context = self.portal.exposure[exp_id]
self.exposure1 = context
rdfmodel = self.portal.workspace.rdfmodel
self.file1 = u'example_model.cellml'
request = TestRequest(
form={
'form.widgets.filename': [self.file1],
'form.buttons.add': 1,
})
testform = ExposureFileGenForm(context, request)
testform.update()
self.exposure_file1 = context[self.file1]
| [
"[email protected]"
] | |
33ca2cbec3283c60f3f48ff39bcc8624ecb5d8f8 | a86bd96433a98e2311447a1923a400470d231f7e | /almanac/style/highlight.py | 93bc92ffea0c08e9b9383963588506d9d14bda0a | [
"MIT",
"BSD-3-Clause"
] | permissive | welchbj/almanac | 3e0d1e8282ec00ad17854536526cf253b331a201 | 7ba473ef07173e0f017dd151e7ca425ba149b8fe | refs/heads/main | 2022-12-18T12:51:53.039850 | 2022-07-06T01:25:03 | 2022-07-06T01:25:03 | 193,141,053 | 5 | 2 | MIT | 2022-12-08T14:28:58 | 2019-06-21T18:07:22 | Python | UTF-8 | Python | false | false | 1,025 | py | from typing import Optional, Type
from pygments import highlight
from pygments.formatter import Formatter
from pygments.formatters import TerminalFormatter
from pygments.lexers import get_lexer_for_mimetype
from pygments.util import ClassNotFound
def highlight_for_mimetype(
text: str,
mimetype: str,
*,
fallback_mimetype: Optional[str] = 'text/plain',
formatter_cls: Type[Formatter] = TerminalFormatter
) -> str:
"""Return ANSI-escaped highlighted text, as per the .
If ``mimetype`` cannot be resolved, then ``fallback_mimetype`` will be used.
If that cannot be resolved (or is ``None``), then the pygments ``ClassNotFound``
exception will be raised.
"""
try:
lexer = get_lexer_for_mimetype(mimetype)
except ClassNotFound as e:
if fallback_mimetype is not None:
lexer = get_lexer_for_mimetype(fallback_mimetype)
else:
raise e
highlighted_text: str = highlight(text, lexer, formatter_cls())
return highlighted_text
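# Minimal usage sketch (illustrative, not part of the original module);
# 'application/json' is a standard mimetype pygments maps to its JSON lexer.
if __name__ == '__main__':
    print(highlight_for_mimetype('{"key": 1}', 'application/json'))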
| [
"[email protected]"
] | |
58d1f9cf803febc2a58fb26e573063434eae588c | caaf9046de59559bb92641c46bb8ab00f731cb46 | /Configuration/Generator/python/JpsiMM_Pt_20_inf_8TeV_TuneCUETP8M1_cfi.py | 3d826f915126679c530acffd43c4e184f6851393 | [] | no_license | neumeist/cmssw | 7e26ad4a8f96c907c7373291eb8df205055f47f0 | a7061201efe9bc5fa3a69069db037d572eb3f235 | refs/heads/CMSSW_7_4_X | 2020-05-01T06:10:08.692078 | 2015-01-11T22:57:32 | 2015-01-11T22:57:32 | 29,109,257 | 1 | 1 | null | 2015-01-11T22:56:51 | 2015-01-11T22:56:49 | null | UTF-8 | Python | false | false | 2,939 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
source = cms.Source("EmptySource")
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(0.0154),
pythiaHepMCVerbosity = cms.untracked.bool(False),
crossSection = cms.untracked.double(354400000.0),
comEnergy = cms.double(8000.0),
maxEventsToPrint = cms.untracked.int32(0),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
            'Charmonium:states(3S1) = 443', # filter on 443; this also prevents other onium states decaying to 443, so we should turn the others off
'Charmonium:O(3S1)[3S1(1)] = 1.16',
'Charmonium:O(3S1)[3S1(8)] = 0.0119',
'Charmonium:O(3S1)[1S0(8)] = 0.01',
'Charmonium:O(3S1)[3P0(8)] = 0.01',
'Charmonium:gg2ccbar(3S1)[3S1(1)]g = on',
'Charmonium:gg2ccbar(3S1)[3S1(8)]g = on',
'Charmonium:qg2ccbar(3S1)[3S1(8)]q = on',
'Charmonium:qqbar2ccbar(3S1)[3S1(8)]g = on',
'Charmonium:gg2ccbar(3S1)[1S0(8)]g = on',
'Charmonium:qg2ccbar(3S1)[1S0(8)]q = on',
'Charmonium:qqbar2ccbar(3S1)[1S0(8)]g = on',
'Charmonium:gg2ccbar(3S1)[3PJ(8)]g = on',
'Charmonium:qg2ccbar(3S1)[3PJ(8)]q = on',
'Charmonium:qqbar2ccbar(3S1)[3PJ(8)]g = on',
'443:onMode = off', # ignore cross-section re-weighting (CSAMODE=6) since selecting wanted decay mode
'443:onIfAny = 13',
'PhaseSpace:pTHatMin = 20.',
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters',
)
)
)
oniafilter = cms.EDFilter("PythiaFilter",
Status = cms.untracked.int32(2),
MaxEta = cms.untracked.double(1000.0),
MinEta = cms.untracked.double(-1000.0),
MinPt = cms.untracked.double(0.0),
ParticleID = cms.untracked.int32(443)
)
mumugenfilter = cms.EDFilter("MCParticlePairFilter",
Status = cms.untracked.vint32(1, 1),
MinPt = cms.untracked.vdouble(0.5, 0.5),
MinP = cms.untracked.vdouble(2.7, 2.7),
MaxEta = cms.untracked.vdouble(2.5, 2.5),
MinEta = cms.untracked.vdouble(-2.5, -2.5),
ParticleCharge = cms.untracked.int32(-1),
ParticleID1 = cms.untracked.vint32(13),
ParticleID2 = cms.untracked.vint32(13)
)
ProductionFilterSequence = cms.Sequence(generator*oniafilter*mumugenfilter)
| [
"[email protected]"
] | |
614afacb7d88abe1697191ba3dc5fea6cdce83ef | a520eb3a99c0e17760cb185b61da2c5e8ae36bed | /apps/users/tests.py | 0e48b5040d17cd508e8ea78902476af196085d14 | [] | no_license | zhuoxiaojian/yishengAnalyze | 9cd4b984a4c90d23d6e2d324def187b88d5b737b | 18d2afad78f8cf3a734d41d835e7caf7635fca47 | refs/heads/master | 2022-12-10T21:30:25.176482 | 2019-01-19T08:55:46 | 2019-01-19T08:55:46 | 153,866,303 | 1 | 1 | null | 2022-12-09T05:32:09 | 2018-10-20T03:32:46 | JavaScript | UTF-8 | Python | false | false | 133 | py | from django.test import TestCase
# Create your tests here.
from users.tasks import test
if __name__ == '__main__':
test.delay()
| [
"[email protected]"
] | |
3a19c9c5be00b701cdd309ad99d37a8fd77a6021 | cd257631f442d24d2e4902cfb60d05095e7c49ad | /week-02/day-01/average_of_input.py | d18279b22f7452cd634a2164b12f176064e3c4ef | [] | no_license | green-fox-academy/Chiflado | 62e6fc1244f4b4f2169555af625b6bfdda41a975 | 008893c63a97f4c28ff63cab269b4895ed9b8cf1 | refs/heads/master | 2021-09-04T03:25:25.656921 | 2018-01-15T09:02:47 | 2018-01-15T09:02:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | # Write a program that asks for 5 integers in a row,
# then it should print the sum and the average of these numbers like:
#
# Sum: 22, Average: 4.4
total = 0
for i in range(0, 5):
    total += int(input('Give me a number: '))
print('Sum: ' + str(total) + ', Average: ' + str(total / 5))
"[email protected]"
] | |
4843b1d4b9b7d2f2fe304e9278792b5d93d54487 | 23f6dbacd9b98fdfd08a6f358b876d3d371fc8f6 | /rootfs/usr/lib/pymodules/python2.6/orca/scripts/toolkits/VCL.py | d29693eaa79ca6a0de4d1cfb80bdd0e09f4e8f63 | [] | no_license | xinligg/trainmonitor | 07ed0fa99e54e2857b49ad3435546d13cc0eb17a | 938a8d8f56dc267fceeb65ef7b867f1cac343923 | refs/heads/master | 2021-09-24T15:52:43.195053 | 2018-10-11T07:12:25 | 2018-10-11T07:12:25 | 116,164,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | /usr/share/pyshared/orca/scripts/toolkits/VCL.py | [
"[email protected]"
] | |
1210e7360134b655175e57ae56324fe180e8c0be | c6320735f140944d2c282729c008a7cf7cf1e98f | /docs/samples/explanation/income/train.py | 1f390f64d00d252386861f2eb8e6c0452dd63fec | [
"Apache-2.0"
] | permissive | gipster/kfserving | 66d2dffd8917ba9029ca2e96f199e1f56df6e41b | bbd3da47a708403fb2a203e28955d5454bc2a1d5 | refs/heads/master | 2020-06-10T18:43:57.148347 | 2019-08-19T00:24:03 | 2019-08-19T00:24:03 | 193,709,786 | 0 | 0 | Apache-2.0 | 2019-06-25T13:08:50 | 2019-06-25T13:08:49 | null | UTF-8 | Python | false | false | 2,400 | py | import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import StandardScaler, OneHotEncoder
from alibi.datasets import adult
import joblib
import dill
from sklearn.pipeline import Pipeline
import alibi
# load data
data, labels, feature_names, category_map = adult()
# define train and test set
np.random.seed(0)
data_perm = np.random.permutation(np.c_[data, labels])
data = data_perm[:, :-1]
labels = data_perm[:, -1]
idx = 30000
X_train, Y_train = data[:idx, :], labels[:idx]
X_test, Y_test = data[idx + 1:, :], labels[idx + 1:]
# feature transformation pipeline
ordinal_features = [x for x in range(len(feature_names)) if x not in list(category_map.keys())]
ordinal_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')),
('scaler', StandardScaler())])
categorical_features = list(category_map.keys())
categorical_transformer = Pipeline(steps=[('imputer', SimpleImputer(strategy='median')),
('onehot', OneHotEncoder(handle_unknown='ignore'))])
preprocessor = ColumnTransformer(transformers=[('num', ordinal_transformer, ordinal_features),
('cat', categorical_transformer, categorical_features)])
# train an RF model
print("Train random forest model")
np.random.seed(0)
clf = RandomForestClassifier(n_estimators=50)
pipeline = Pipeline([('preprocessor', preprocessor),
('clf', clf)])
pipeline.fit(X_train, Y_train)
print("Creating an explainer")
predict_fn = lambda x: clf.predict(preprocessor.transform(x))
explainer = alibi.explainers.AnchorTabular(predict_fn=predict_fn,
feature_names=feature_names,
categorical_names=category_map)
explainer.fit(X_train)
explainer.predict_fn = None  # Clear explainer predict_fn as it's a lambda and will be reset when loaded
with open("explainer.dill", 'wb') as f:
dill.dump(explainer,f)
print("Saving individual files")
# Dump files - for testing creating an AnchorExplainer from components
joblib.dump(pipeline, 'model.joblib')
joblib.dump(X_train, "train.joblib")
joblib.dump(feature_names, "features.joblib")
joblib.dump(category_map, "category_map.joblib")
| [
"[email protected]"
] | |
c56f4e4fb4ccc6672145c456c1c1d50ffbfd1d54 | eb91c2d2560a3e4ce35ebc2d6550f001579c03c5 | /codeforces/1353/B.py | 9f295a1f5fb9c4df31d57960b9fb7930b9a23708 | [] | no_license | kamojiro/codeforces | 0a3a40c8cab96a0257bb9d6ed53de217192cbabb | 9e66297fa3025ba6731111ab855096d579e86c67 | refs/heads/master | 2021-07-07T11:47:42.373189 | 2020-08-15T14:45:36 | 2020-08-15T14:45:36 | 176,296,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 522 | py | #import sys
#input = sys.stdin.readline
def main():
    N = int(input())
    for _ in range(N):
        n, k = map(int, input().split())
        A = list(map(int, input().split()))
        B = list(map(int, input().split()))
        # Greedy: sort A ascending and B descending, then spend the (at most
        # k) swaps on the first positions whenever B offers a larger value.
        A.sort()
        B.sort(reverse=True)
        ans = 0
        for i in range(n):
            if i + 1 <= k and A[i] < B[i]:
                ans += B[i]
                continue
            ans += A[i]
        print(ans)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
d8d125160792a97e1c2c5c39a0c928f1655589b2 | 250124d214f6834230314dfee4a5dd03713953a2 | /part-2/2-iterators/9-Iterating_callables.py | 0dcb235c2e78a05bf787172829de56522d7aafc5 | [
"MIT"
] | permissive | boconlonton/python-deep-dive | 3b26b913d1f6f2fdf451a8bc4f24a24d1bb85a64 | c01591a4943c7b77d4d2cd90a8b23423280367a3 | refs/heads/master | 2023-08-30T21:20:12.240813 | 2021-09-29T22:21:26 | 2021-09-29T22:21:26 | 365,419,435 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,347 | py | """Iterating callables"""
import random
def counter():
i = 0
def inc():
nonlocal i
i += 1
return i
return inc
class CallableIterator:
def __init__(self, callable_, sentinel):
self.callable = callable_
self.sentinel = sentinel
self.is_consumed = False
def __iter__(self):
return self
def __next__(self):
if self.is_consumed:
            # Once the sentinel has been seen, keep raising StopIteration
raise StopIteration
else:
result = self.callable()
if result == self.sentinel:
self.is_consumed = True
raise StopIteration
else:
return result
# Usage
cnt = counter()
cnt_iter = CallableIterator(cnt, 5)
for c in cnt_iter:
print(c)
# Usage with iter()
cnt = counter()
cnt_iter = iter(cnt, 5)
for c in cnt_iter:
print(c)
# Create an iterator for random function
# which will stop when meet sentinel
random_iter = iter(lambda:
random.randint(0, 10), 8)
random.seed(0)
for num in random_iter:
print(num)
def countdown(start=10):
def run():
nonlocal start
start -= 1
return start
return run
print('---------')
takeoff = countdown(10)
takeoff_iter = iter(takeoff, -1)
for num in takeoff_iter:
print(num)
| [
"[email protected]"
] | |
2b612f6eea0c6ac37a27d2d8fb6083285ff16073 | 19bc4d44dc7303e23a6949b1bc7b98b65bcf80e9 | /python/Linear Regression in Python/Simple Linear Regression/Linear Regression at Codecademy/script.py | 661d035628a95c8b252a74e85e4a4024c02fe7a9 | [] | no_license | henry1034/Challenge-Project-of-CodeCademy | c66190ff3a318e22f263fcf78344632773065c24 | 61ebe84696cec120393acca62b4fce4bdea0fb30 | refs/heads/master | 2023-07-04T01:04:16.978374 | 2021-07-29T17:27:56 | 2021-07-29T17:27:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,583 | py | # Load libraries
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import statsmodels.api as sm
import codecademylib3
# Read in the data
codecademy = pd.read_csv('codecademy.csv')
# Print the first five rows
print(codecademy.head())
# Create a scatter plot of score vs completed
plt.scatter(codecademy.completed, codecademy.score)
# Show then clear plot
plt.show()
plt.clf()
# Fit a linear regression to predict score based on prior lessons completed
model = sm.OLS.from_formula(
"score ~ completed",
data = codecademy
)
result = model.fit()
print(result.params)
# Intercept interpretation:
print("A learner who has previously completed 0 content items is expected to earn a quiz score of 13.2 points.")
# Slope interpretation:
print("Students who have completed one additional prior content item are expected to score 1.3 points higher on the quiz.")
# Plot the scatter plot with the line on top
plt.scatter(codecademy.completed, codecademy.score)
plt.plot(codecademy.completed, result.predict(codecademy))
# Show then clear plot
plt.show()
plt.clf()
# Predict score for learner who has completed 20 prior lessons
print(result.predict({'completed':[20]}))
intercept = result.params[0]
slope = result.params[1]
print(slope * 20 + intercept)
# Calculate fitted values
fitted_values = result.predict(codecademy)
# Calculate residuals
residuals = codecademy.score - fitted_values
# Check normality assumption
plt.hist(residuals)
# Show then clear the plot
plt.show()
plt.clf()
# Check homoscedasticity assumption
plt.scatter(fitted_values, residuals)
# Show then clear the plot
plt.show()
plt.clf()
# Create a boxplot of score vs lesson
sns.boxplot(
data = codecademy,
x = "lesson",
y = "score"
)
# Show then clear plot
plt.show()
plt.clf()
# Fit a linear regression to predict score based on which lesson they took
model = sm.OLS.from_formula(
"score ~ lesson",
data = codecademy
)
result = model.fit()
print(result.params)
# Calculate and print the group means and mean difference (for comparison)
mean_score_lessonA = np.mean(codecademy.score[codecademy.lesson == 'Lesson A'])
mean_score_lessonB = np.mean(codecademy.score[codecademy.lesson == 'Lesson B'])
print('Mean score (A): ', mean_score_lessonA)
print('Mean score (B): ', mean_score_lessonB)
print('Mean score difference: ', mean_score_lessonA - mean_score_lessonB)
# Use `sns.lmplot()` to plot `score` vs. `completed` colored by `lesson`
sns.lmplot(
x = "completed",
y = "score",
hue = "lesson",
data = codecademy
)
plt.show()
plt.clf()
| [
"[email protected]"
] | |
950e9fce4dcbd3c0bc732cdc70d82b7bb4f0e7c3 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AlipayIserviceIsresourceTenantquerybytntidQueryModel.py | bf348d94e07635b10d4f588191dab57c1660c589 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,400 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayIserviceIsresourceTenantquerybytntidQueryModel(object):
def __init__(self):
self._tnt_inst_id = None
self._ur_id = None
@property
def tnt_inst_id(self):
return self._tnt_inst_id
@tnt_inst_id.setter
def tnt_inst_id(self, value):
self._tnt_inst_id = value
@property
def ur_id(self):
return self._ur_id
@ur_id.setter
def ur_id(self, value):
self._ur_id = value
def to_alipay_dict(self):
params = dict()
if self.tnt_inst_id:
if hasattr(self.tnt_inst_id, 'to_alipay_dict'):
params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict()
else:
params['tnt_inst_id'] = self.tnt_inst_id
if self.ur_id:
if hasattr(self.ur_id, 'to_alipay_dict'):
params['ur_id'] = self.ur_id.to_alipay_dict()
else:
params['ur_id'] = self.ur_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayIserviceIsresourceTenantquerybytntidQueryModel()
if 'tnt_inst_id' in d:
o.tnt_inst_id = d['tnt_inst_id']
if 'ur_id' in d:
o.ur_id = d['ur_id']
return o
| [
"[email protected]"
] | |
74ca68420b60222f058228f98a1a446f42d5311d | 0e3f14722cd87767d29f794530dc1eabc4678a14 | /projects/migrations/0001_initial.py | bf6df9a575080f5727e6d0d3115ebfc864eafca8 | [] | no_license | Mostaquim/mycampaign | e807386b5bc034c0bf8689f29da07bae752ef971 | 4343ff08cb7d86de3efcc3e81b49ca93d01e7ae9 | refs/heads/master | 2020-05-09T23:51:06.345794 | 2019-05-09T10:24:22 | 2019-05-09T10:24:22 | 181,513,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,738 | py | # Generated by Django 2.1 on 2019-05-06 18:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('accounts', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Invoice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('currency', models.IntegerField(choices=[(1, '£')])),
('sent_date', models.DateField(auto_now_add=True)),
('issue_date', models.DateField()),
('due_date', models.DateField()),
('paid_date', models.DateField(null=True)),
('terms', models.TextField()),
('discount', models.DecimalField(decimal_places=2, max_digits=11)),
('tax', models.DecimalField(decimal_places=2, max_digits=11)),
('total', models.DecimalField(decimal_places=2, max_digits=11)),
('status', models.IntegerField(choices=[(1, 'Sent'), (2, 'Open'), (3, 'Paid'), (4, 'Partially paid'), (5, 'Cancelled')])),
('second_tax', models.DecimalField(decimal_places=2, max_digits=11)),
],
),
migrations.CreateModel(
name='InvoiceItems',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('amount', models.DecimalField(decimal_places=2, max_digits=11)),
('description', models.TextField()),
('value', models.DecimalField(decimal_places=2, max_digits=11)),
('name', models.CharField(max_length=255, null=True)),
('item_type', models.CharField(max_length=255, null=True)),
],
),
migrations.CreateModel(
name='PrintingOrder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pages', models.IntegerField(choices=[(1, 'Single Sided'), (2, 'Double Sided'), (3, '2 Pages'), (4, '4 Pages'), (5, '6 Pages'), (6, '8 Pages'), (7, '10 Pages'), (8, '12 Pages')])),
('page_orientation', models.IntegerField(choices=[(1, 'Portrait'), (2, 'Landscape')])),
('colours', models.IntegerField(choices=[(1, '1/0-coloured Black'), (2, '2/0-coloured Black + Pantone'), (3, '2/0-coloured Black + Gold'), (4, '4/0-coloured CMYK')])),
('processing', models.IntegerField(choices=[(1, 'Trimming'), (2, 'Trimming Corner Rounded')])),
('priority', models.IntegerField(choices=[(1, 'Low'), (2, 'Normal'), (3, 'High'), (4, 'Urgent')], default=1)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type_of_service', models.IntegerField(choices=[(1, 'Business To Business'), (2, 'Hand To Hand'), (3, 'Direct Mail'), (4, 'Residential Homes'), (5, 'Shared Distribution'), (6, 'Consultation Distribution')], default=1)),
('number_of_boxes', models.IntegerField(choices=[(1, '1'), (2, '2'), (3, '3'), (4, '4 or more'), (5, 'N/A')], default=1)),
('type_of_media', models.IntegerField(choices=[(1, 'Flyer'), (2, 'Leaflet'), (3, 'Folded Leaflet'), (4, 'Other')], default=1)),
('require_collection', models.IntegerField(choices=[(1, 'Yes'), (2, 'No')], default=1)),
('quantity_of_flyers', models.IntegerField(null=True)),
('title_of_media', models.CharField(max_length=255, null=True)),
('campaign_details', models.TextField(max_length=255)),
('agreed_cost', models.DecimalField(decimal_places=2, max_digits=11)),
('campaign_start_date', models.DateField()),
('campaign_finish_date', models.DateField()),
('special_instruction', models.TextField()),
('progress', models.IntegerField(default=1)),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('attachments', models.ManyToManyField(to='core.Attachments')),
('company', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='company', to='accounts.Company')),
('project_admin', models.ForeignKey(limit_choices_to={'staff': True}, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='project_admin', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ProjectActivity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, null=True)),
('subject', models.CharField(max_length=255)),
('message', models.TextField()),
('acitivity_type', models.CharField(max_length=255)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Project')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
f9757cd5f5931e24e90a9be34c09ca15d7bdbedd | f0adca7cac7fb12cdb89e7e821559fe2603bf4bc | /src/234/recipe_234_02.py | 029ab82d6382993f4d8564ed733634fc696da9c6 | [] | no_license | eriamavro/python-recipe-src | dccfa06bc56fcc713f8da9e466f04d07c1f961f0 | d14f3e4cd885515e9a9a7b8e3f064609c8e50fad | refs/heads/master | 2023-02-13T02:08:44.531621 | 2021-01-14T12:03:05 | 2021-01-14T12:03:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | import requests
import json
payload = {'key1': 'value1', 'key2': 'value2'}
url = "http://httpbin.org/post"
# Pass the dict via ``json=`` so requests serializes it once and sets the
# Content-Type header; wrapping it in json.dumps() would double-encode it.
r = requests.post(url, json=payload)
print(r.text)
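# Equivalent alternative (illustrative): serialize the body yourself and set
# the Content-Type header explicitly.
# r = requests.post(url, data=json.dumps(payload),
#                   headers={'Content-Type': 'application/json'})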
| [
"[email protected]"
] | |
34e55c9eb4ff0873f56820e807557d8c6cb55bb7 | 214dde26c268d1d0b7991318c5e2d43aa27af89b | /visualization/capital_flow/app.py | 9c72a241df388a8420f220fc97f2591d354deea3 | [] | no_license | hellobiek/smart_deal_tool | f1846903ac402257bbe92bd23f9552970937d50e | ba8aad0a37843362f5833526921c6f700fb881f1 | refs/heads/master | 2022-09-04T04:41:34.598164 | 2022-08-04T22:04:09 | 2022-08-04T22:04:09 | 88,258,362 | 36 | 14 | null | null | null | null | UTF-8 | Python | false | false | 14,214 | py | # -*- coding: utf-8 -*-
import os
import sys
from os.path import abspath, dirname
sys.path.insert(0, dirname(dirname(dirname(abspath(__file__)))))
import dash
import dash_table
import const as ct
import pandas as pd
import dash_core_components as dcc
import dash_html_components as html
from flask_caching import Cache
from common import str_of_num
from rstock import RIndexStock
from cstock_info import CStockInfo
from visualization.dash.hgt import HGT
from visualization.dash.rzrq import RZRQ
from datetime import datetime, timedelta
from dash.dependencies import Input, Output
top100 = None
add_data = None
del_data = None
redis_host = "127.0.0.1"
dbinfo = ct.OUT_DB_INFO
mstart = None
mend = None
model_dir = "/Volumes/data/quant/stock/data/models"
report_dir = "/Volumes/data/quant/stock/data/tdx/report"
cal_file_path = "/Volumes/data/quant/stock/conf/calAll.csv"
stocks_dir = "/Volumes/data/quant/stock/data/tdx/history/days"
bonus_path = "/Volumes/data/quant/stock/data/tdx/base/bonus.csv"
rvaluation_dir = "/Volumes/data/quant/stock/data/valuation/rstock"
base_stock_path = "/Volumes/data/quant/stock/data/tdx/history/days"
valuation_path = "/Volumes/data/quant/stock/data/valuation/reports.csv"
pledge_file_dir = "/Volumes/data/quant/stock/data/tdx/history/weeks/pledge"
report_publish_dir = "/Volumes/data/quant/stock/data/crawler/stock/financial/report_announcement_date"
tushare_file_path = "/Users/hellobiek/Documents/workspace/python/quant/smart_deal_tool/configure/tushare.json"
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets = external_stylesheets, suppress_callback_exceptions = True)
CACHE_CONFIG = {
'CACHE_TYPE': 'redis',
'CACHE_REDIS_URL': '127.0.0.1:6579'
}
cache = Cache()
cache.init_app(app.server, config=CACHE_CONFIG)
app.layout = html.Div([
html.H1('资金流情况'),
dcc.DatePickerRange(
id = 'date-picker-range',
min_date_allowed = datetime(2017, 1, 1),
max_date_allowed = datetime.now(),
initial_visible_month = datetime.now(),
start_date = datetime.now() - timedelta(7),
end_date = datetime.now()
),
html.Div(id='output-start-date', style={'display': 'none'}),
html.Div(id='output-end-date', style={'display': 'none'}),
dcc.Tabs(id="tabs", value='tabs', children=[
dcc.Tab(label='港股通', value='hk-flow'),
dcc.Tab(label='融资融券', value='leveraged-funds'),
]),
html.Div(id='hold-situation', children='hgt-hold-situation-table'),
])
@cache.memoize()
def get_money_flow_data_from_rzrq(start, end):
rzrq_client = RZRQ(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host, fpath = tushare_file_path)
data = rzrq_client.get_data("ALL", start, end)
if start not in set(data.date.tolist()):
return None, None, "{} 没有数据".format(start)
if end not in set(data.date.tolist()):
return None, None, "{} 没有数据".format(end)
data['code'] = data['code'].str[0:6]
data['rzcje'] = data['rzmre'] + data['rzche']
data = data.reset_index(drop = True)
rstock = RIndexStock(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
rstock_info = rstock.get_data(end)
rstock_info = rstock_info.drop('date', axis = 1)
stock_info_client = CStockInfo(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host, stocks_dir = stocks_dir, base_stock_path = base_stock_path)
base_df = stock_info_client.get()
base_df = base_df[['code', 'name', 'timeToMarket', 'industry', 'sw_industry']]
rstock_info = pd.merge(rstock_info, base_df, how='inner', on=['code'])
df = pd.merge(data, rstock_info, how='left', on=['code'])
df['asserts'] = df['close'] * df['outstanding'] / 10e7
df['asserts'] = round(df['asserts'], 2)
df['rzye'] = round(df['rzye'], 2)
df['rzcje'] = round(df['rzcje'], 2)
df['rzche'] = round(df['rzche'], 2)
df['rzmre'] = round(df['rzmre'], 2)
df['rzrqye'] = round(df['rzrqye'], 2)
df = df[['date', 'code', 'name', 'rzye', 'rzmre', 'rzche', 'rzrqye', 'rzcje', 'asserts', 'industry', 'sw_industry']]
df = df.dropna(axis=0, how='any')
df = df.reset_index(drop = True)
s_data = df.loc[df.date == start]
s_data = s_data.reset_index(drop = True)
e_data = df.loc[df.date == end]
e_data = e_data.reset_index(drop = True)
return s_data, e_data, None
@cache.memoize()
def get_top20_stock_info_from_hgt(cdate):
hgt_client = HGT(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
info = hgt_client.get_top10_info(cdate)
info['net_turnover'] = info['buy_turnover'] - info['sell_turnover']
info = info.sort_values(by = 'net_turnover', ascending= False)
info = info.drop('rank', axis = 1)
info = info.reset_index(drop = True)
info['total_turnover'] = info['total_turnover'].apply(lambda x:str_of_num(x))
info['net_turnover'] = info['net_turnover'].apply(lambda x:str_of_num(x))
info['buy_turnover'] = info['buy_turnover'].apply(lambda x:str_of_num(x))
info['sell_turnover'] = info['sell_turnover'].apply(lambda x:str_of_num(x))
return info
@cache.memoize()
def get_money_flow_data_from_hgt(start, end):
hgt_client = HGT(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
sh_data = hgt_client.get_data("ALL_SH", start, end)
sz_data = hgt_client.get_data("ALL_SZ", start, end)
if start not in sh_data.date.tolist():
return None, None, "{} 没有数据".format(start)
if end not in sh_data.date.tolist():
return None, None, "{} 没有数据".format(end)
sh_data = sh_data.loc[(sh_data.date == start) | (sh_data.date == end)]
sz_data = sz_data.loc[(sz_data.date == start) | (sz_data.date == end)]
sh_data = sh_data.append(sz_data)
sh_data = sh_data.reset_index(drop = True)
rstock = RIndexStock(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host)
rstock_info = rstock.get_data(end)
rstock_info = rstock_info[['code', 'totals']]
stock_info_client = CStockInfo(dbinfo = ct.OUT_DB_INFO, redis_host = redis_host, stocks_dir = stocks_dir, base_stock_path = base_stock_path)
base_df = stock_info_client.get()
base_df = base_df[['code', 'timeToMarket', 'industry', 'sw_industry']]
rstock_info = pd.merge(rstock_info, base_df, how='inner', on=['code'])
df = pd.merge(sh_data, rstock_info, how='left', on=['code'])
df = df.dropna(axis=0, how='any')
df = df.reset_index(drop = True)
df['percent'] = 100 * df['volume'] / df['totals']
df = df[['date', 'code', 'name', 'timeToMarket', 'industry', 'sw_industry', 'percent', 'volume', 'totals']]
start_data = df.loc[df.date == start]
start_data = start_data.sort_values(by = 'percent', ascending= False)
start_data = start_data.reset_index(drop = True)
end_data = df.loc[df.date == end]
end_data = end_data.sort_values(by = 'percent', ascending= False)
end_data = end_data.reset_index(drop = True)
top100 = end_data.loc[end_data.percent > 5]
top100 = top100.reset_index(drop = True)
top100['percent'] = round(top100['percent'], 2)
start_data = start_data[['code', 'percent']]
start_data = start_data.rename(columns = {"percent": "spercent"})
cdata = pd.merge(end_data, start_data, how='left', on=['code'])
cdata = cdata.dropna(axis=0, how='any')
cdata['delta_percent'] = cdata['percent'] - cdata['spercent']
cdata = cdata[['date', 'code', 'name', 'timeToMarket', 'industry', 'sw_industry', 'delta_percent', 'volume', 'totals']]
cdata['delta_percent'] = round(cdata['delta_percent'], 2)
cdata = cdata.sort_values(by = 'delta_percent', ascending= False)
cdata = cdata.reset_index(drop = True)
add_data = cdata.loc[cdata.delta_percent > 0]
add_data = add_data.sort_values(by = 'delta_percent', ascending= False)
add_data = add_data.head(30)
add_data = add_data.reset_index(drop = True)
del_data = cdata.loc[cdata.delta_percent < 0]
del_data = del_data.sort_values(by = 'delta_percent', ascending= True)
del_data = del_data.head(30)
del_data = del_data.reset_index(drop = True)
return top100, add_data, del_data
@app.callback(
[Output('output-start-date', 'children'), Output('output-end-date', 'children')],
[Input('date-picker-range', 'start_date'), Input('date-picker-range', 'end_date')])
def update_date(start_date, end_date):
global mstart, mend
if start_date is not None and end_date is not None:
mstart = start_date.split(' ')[0]
mend = end_date.split(' ')[0]
return mstart, mend
return None, None
@app.callback(Output('hold-situation', 'children'),
[Input('tabs', 'value'), Input('output-start-date', 'children'), Input('output-end-date', 'children')])
def render_content(model_name, start_date, end_date):
if model_name == 'hk-flow':
global top100, add_data, del_data
top100, add_data, del_data = get_money_flow_data_from_hgt(start_date, end_date)
top20_info = get_top20_stock_info_from_hgt(end_date)
if top20_info is None or top20_info.empty:
return html.Div([html.H3('{} : 二十大热门股没有数据'.format(end_date))])
else:
if top100 is None:
return html.Div([
html.H3('{}日的20大成交额股票(按照净买入额排序)'.format(end_date)),
dash_table.DataTable(
id = 'hgt-top20-data',
columns = [{"name": i, "id": i} for i in top20_info.columns],
data = top20_info.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
html.H3('{}: 港股通数据有错误'.format(end_date))])
else:
return html.Div([
html.H3('{}日的20大成交额股票(按照净买入额排序)'.format(end_date)),
dash_table.DataTable(
id = 'hgt-top20-data',
columns = [{"name": i, "id": i} for i in top20_info.columns],
data = top20_info.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
html.H3('{}日持股比例最多的100只股票(持有股本/总股本)'.format(end_date)),
dash_table.DataTable(
id = 'hgt-data',
columns = [{"name": i, "id": i} for i in top100.columns],
data = top100.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
html.H3('持股比例增加最多的30只股票(持有股本/总股本)'),
dash_table.DataTable(
id = 'hgt-add-data',
columns = [{"name": i, "id": i} for i in add_data.columns],
data = add_data.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
html.H3('持股比例减少最多的30只股票(持有股本/总股本)'),
dash_table.DataTable(
id = 'hgt-del-data',
columns = [{"name": i, "id": i} for i in del_data.columns],
data = del_data.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
])
elif model_name == 'leveraged-funds':
s_data, e_data, msg = get_money_flow_data_from_rzrq(start_date, end_date)
if s_data is None or e_data is None:
return html.Div([html.H3(msg)])
add_data = e_data[['code', 'name', 'rzrqye', 'industry']]
add_data = add_data.rename(columns = {"rzrqye": "end_rzrqye"})
del_data = s_data[['code', 'rzrqye']]
del_data = del_data.rename(columns = {"rzrqye": "start_rzrqye"})
df = pd.merge(add_data, del_data, how='left', on=['code'])
df['delta_rzrqye'] = round(df['end_rzrqye'] - df['start_rzrqye'], 2)
df = df[['code', 'name', 'industry', 'start_rzrqye', 'end_rzrqye', 'delta_rzrqye']]
add_data = df.nlargest(30, 'delta_rzrqye')
df['delta_rzrqye'] = df['delta_rzrqye'] * -1
del_data = df.nlargest(30, 'delta_rzrqye')
del_data['delta_rzrqye'] = del_data['delta_rzrqye'] * -1
return html.Div([
html.H3('{}日的融资成交额股票(按照净买入额排序)'.format(end_date)),
dash_table.DataTable(
id = 'rzrq-data',
columns = [{"name": i, "id": i} for i in e_data.columns],
data = e_data.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
html.H3('持股比例增加最多的30只股票(融资融券余额/流通市值)'),
dash_table.DataTable(
id = 'rzrq-add-data',
columns = [{"name": i, "id": i} for i in add_data.columns],
data = add_data.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
html.H3('持股比例减少最多的30只股票(融资融券余额/流通市值)'),
dash_table.DataTable(
id = 'rzrq-del-data',
columns = [{"name": i, "id": i} for i in del_data.columns],
data = del_data.to_dict('records'),
style_cell={'textAlign': 'center'},
sort_action = "native",
),
])
if __name__ == '__main__':
#start_date = '2020-07-03'
#end_date = '2020-07-08'
#s_data, e_data, msg = get_money_flow_data_from_rzrq(start_date, end_date)
    #import pdb
#pdb.set_trace()
app.run_server(debug = True, port = 9998)
| [
"[email protected]"
] | |
94a836f98274030034fc1d71f9ea205e92cb8242 | 9c8c8ae3842ec9a6f36730234c02f93f71ebda20 | /vndk/tools/sourcedr/ninja/list_installed_module_under_source.py | 3643e9d57df15529f03701ae39cfbbabc54bc9a2 | [
"Apache-2.0"
] | permissive | batyrf/platform_development | 437bc6560a062d0ce7b27bab17b78109a72b1773 | d4f7efc0c58598e3fc02a1e4fe8e751bd4ae8f0a | refs/heads/master | 2020-12-26T18:37:29.529464 | 2020-02-01T04:54:27 | 2020-02-01T04:54:27 | 237,598,759 | 3 | 0 | null | 2020-02-01T10:35:07 | 2020-02-01T10:35:06 | null | UTF-8 | Python | false | false | 2,232 | py | #!/usr/bin/env python3
import argparse
import itertools
import json
import posixpath
import re
def match_any(regex, iterable):
"""Check whether any element in iterable matches regex."""
return any(regex.match(elem) for elem in iterable)
class ModuleInfo(object):
def __init__(self, module_info_path):
with open(module_info_path, 'r') as module_info_file:
self._json = json.load(module_info_file)
def list(self, installed_filter=None, module_definition_filter=None):
for name, info in self._json.items():
installs = info['installed']
paths = info['path']
if installed_filter and not match_any(installed_filter, installs):
continue
if module_definition_filter and \
not match_any(module_definition_filter, paths):
continue
for install, path in itertools.product(installs, paths):
yield (install, path)
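# Illustrative output of ModuleInfo.list() (hypothetical paths): tuples such
# as ('out/target/product/generic/system/lib64/libfoo.so', 'vendor/foo').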
def _parse_args():
"""Parse command line arguments"""
parser = argparse.ArgumentParser()
parser.add_argument('module_info', help='Path to module-info.json')
parser.add_argument('--out-dir', default='out',
help='Android build output directory')
parser.add_argument('--installed-filter',
help='Installation filter (regular expression)')
parser.add_argument('--module-definition-filter',
help='Module definition filter (regular expression)')
return parser.parse_args()
def main():
"""Main function"""
args = _parse_args()
installed_filter = None
if args.installed_filter:
installed_filter = re.compile(
re.escape(posixpath.normpath(args.out_dir)) + '/' +
'(?:' + args.installed_filter + ')')
module_definition_filter = None
if args.module_definition_filter:
module_definition_filter = re.compile(args.module_definition_filter)
module_info = ModuleInfo(args.module_info)
for installed_file, module_path in \
module_info.list(installed_filter, module_definition_filter):
print(installed_file, module_path)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
7be77a226991e8e5cd0cfa304d5c6e570a73c828 | 75eac06d5714843f1f4a1ead6d8a3164adcb9a61 | /csqa/models/bert_sep.py | 2f14af9e1c38b1fd04d1c54e957139e86b27b5be | [] | no_license | Shuailong/csqa | 0b3b8de0fc139d84c4841a948fff69a3d0855326 | bc03dfbb1abe8fd37feee2870210f4209ad1d6af | refs/heads/master | 2022-01-04T17:52:53.909954 | 2020-03-28T04:59:45 | 2020-03-28T04:59:45 | 181,131,710 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,157 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: Shuailong
# @Email: [email protected]
# @Date: 2019-05-18 23:07:29
# @Last Modified by: Shuailong
# @Last Modified time: 2019-05-18 23:07:39
import logging
from typing import Any, Dict, List, Optional
from overrides import overrides
import torch
from allennlp.common.checks import check_dimensions_match
from allennlp.data import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import TextFieldEmbedder, FeedForward
from allennlp.nn import InitializerApplicator, RegularizerApplicator
from allennlp.training.metrics import CategoricalAccuracy
from allennlp.modules.attention import DotProductAttention
from allennlp.nn.util import weighted_sum
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@Model.register("csqa-bert-sep")
class CSQABertSep(Model):
"""
    This class implements baseline Bert model for commonsenseqa dataset described in NAACL 2019 paper
CommonsenseQA: A Question Answering Challenge Targeting Commonsense Knowledge [https://arxiv.org/abs/1811.00937].
In this set-up, a single instance is a list of question answer pairs, and an answer index to indicate
which one is correct.
Parameters
----------
vocab : ``Vocabulary``
text_field_embedder : ``TextFieldEmbedder``
Used to embed the ``qa_pairs`` ``TextFields`` we get as input to the model.
dropout : ``float``, optional (default=0.2)
If greater than 0, we will apply dropout with this probability after all encoders (pytorch
LSTMs do not apply dropout to their last layer).
"""
def __init__(self, vocab: Vocabulary,
bert: TextFieldEmbedder,
classifier: FeedForward,
dropout: float = 0.1,
initializer: InitializerApplicator = InitializerApplicator(),
regularizer: Optional[RegularizerApplicator] = None) -> None:
super().__init__(vocab, regularizer)
self._bert = bert
self._classifier = classifier
if dropout:
self.dropout = torch.nn.Dropout(dropout)
else:
self.dropout = None
self._pooler = FeedForward(input_dim=bert.get_output_dim(),
num_layers=1,
hidden_dims=bert.get_output_dim(),
activations=torch.tanh)
check_dimensions_match(bert.get_output_dim() * 2, classifier.get_input_dim(),
"bert embedding dim", "classifier input dim")
self._accuracy = CategoricalAccuracy()
self._loss = torch.nn.CrossEntropyLoss()
initializer(self)
def forward(self, # type: ignore
question: Dict[str, torch.LongTensor],
choices: Dict[str, torch.LongTensor],
evidence: Dict[str, torch.LongTensor],
answer_index: torch.IntTensor = None,
metadata: List[Dict[str, Any]
] = None # pylint:disable=unused-argument
) -> Dict[str, torch.Tensor]:
# pylint: disable=arguments-differ
"""
Parameters
----------
        question : Dict[str, torch.LongTensor]
            From a ``TextField`` containing the question.
        choices : Dict[str, torch.LongTensor]
            From a ``ListField`` of ``TextField`` s, one per answer choice.
        evidence : Dict[str, torch.LongTensor]
            From a ``TextField``; accepted but not used by this model.
answer_index : ``torch.IntTensor``, optional
From an ``IndexField``. This is what we are trying to predict.
If this is given, we will compute a loss that gets included in the output dictionary.
metadata : ``List[Dict[str, Any]]``, optional
If present, this should contain the question ID, question and choices for each instance
in the batch. The length of this list should be the batch size, and each dictionary
should have the keys ``qid``, ``question``, ``choices``, ``question_tokens`` and
``choices_tokens``.
Returns
-------
An output dictionary consisting of the followings.
qid : List[str]
A list consisting of question ids.
answer_logits : torch.FloatTensor
A tensor of shape ``(batch_size, num_options=5)`` representing unnormalised log
probabilities of the choices.
answer_probs : torch.FloatTensor
A tensor of shape ``(batch_size, num_options=5)`` representing probabilities of the
choices.
loss : torch.FloatTensor, optional
A scalar loss to be optimised.
"""
# batch, seq_len -> batch, seq_len, emb
question_hidden = self._bert(question)
batch_size, emb_size = question_hidden.size(0), question_hidden.size(2)
question_hidden = question_hidden[..., 0, :] # batch, emb
# batch, 5, seq_len -> batch, 5, seq_len, emb
choice_hidden = self._bert(choices, num_wrapping_dims=1)
choice_hidden = choice_hidden[..., 0, :] # batch, 5, emb
if self.dropout:
question_hidden = self.dropout(question_hidden)
choice_hidden = self.dropout(choice_hidden)
question_hidden = question_hidden.unsqueeze(
1).expand(batch_size, 5, emb_size)
cls_hidden = torch.cat([question_hidden, choice_hidden],
dim=-1)
# batch,5,emb*2
# the final MLP -- apply dropout to input, and MLP applies to hidden
answer_logits = self._classifier(cls_hidden).squeeze(-1) # batch, 5
answer_probs = torch.nn.functional.softmax(answer_logits, dim=-1)
qids = [m['qid'] for m in metadata]
output_dict = {"answer_logits": answer_logits,
"answer_probs": answer_probs,
"qid": qids}
if answer_index is not None:
answer_index = answer_index.squeeze(-1) # batch
loss = self._loss(answer_logits, answer_index)
self._accuracy(answer_logits, answer_index)
output_dict["loss"] = loss
return output_dict
@overrides
def get_metrics(self, reset: bool = False) -> Dict[str, float]:
return {'accuracy': self._accuracy.get_metric(reset)}
| [
"[email protected]"
] | |
85297224463e89bbcee3a6b86337b908c5929cb2 | 8a0e14299d8b915c0a909cf9fa9a86589dc63d76 | /python/ray/tune/automl/__init__.py | cab4c4de4dab106306090e7cdc11ee1396f99abd | [
"Apache-2.0",
"MIT"
] | permissive | natashamjaques/ray | 795e4271c3c5f3e261327afea40b81ffe6f362ac | aca9dd5ee7a8fef508a5383fdd26ad8ccdcb16e4 | refs/heads/master | 2020-04-12T05:58:15.680359 | 2019-03-06T22:08:10 | 2019-03-06T22:08:10 | 162,337,948 | 3 | 2 | Apache-2.0 | 2018-12-18T19:47:02 | 2018-12-18T19:47:01 | null | UTF-8 | Python | false | false | 464 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from ray.tune.automl.genetic_searcher import GeneticSearch
from ray.tune.automl.search_policy import GridSearch, RandomSearch
from ray.tune.automl.search_space import SearchSpace, \
ContinuousSpace, DiscreteSpace
__all__ = [
"ContinuousSpace",
"DiscreteSpace",
"SearchSpace",
"GridSearch",
"RandomSearch",
"GeneticSearch",
]
| [
"[email protected]"
] | |
0ceaa149f62c4d0ac1618af38585c3570814e82d | 6aa7e203f278b9d1fd01244e740d5c944cc7c3d3 | /airflow/providers/apache/kylin/hooks/kylin.py | 59f6ce94ff23200923bd0942ba05a73279150f5b | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"Python-2.0"
] | permissive | laserpedro/airflow | 83fc991d91749550b151c81876d9e7864bff3946 | a28afa8172489e41ecf7c381674a0cb91de850ff | refs/heads/master | 2023-01-02T04:55:34.030935 | 2020-10-24T15:55:11 | 2020-10-24T15:55:11 | 285,867,990 | 1 | 0 | Apache-2.0 | 2020-08-07T15:56:49 | 2020-08-07T15:56:49 | null | UTF-8 | Python | false | false | 2,795 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Optional
from kylinpy import exceptions, kylinpy
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
class KylinHook(BaseHook):
"""
:param kylin_conn_id: The connection id as configured in Airflow administration.
:type kylin_conn_id: str
    :param project: project name
:type project: Optional[str]
:param dsn: dsn
:type dsn: Optional[str]
"""
def __init__(self,
kylin_conn_id: Optional[str] = 'kylin_default',
project: Optional[str] = None,
dsn: Optional[str] = None
):
super().__init__()
self.kylin_conn_id = kylin_conn_id
self.project = project
self.dsn = dsn
def get_conn(self):
conn = self.get_connection(self.kylin_conn_id)
if self.dsn:
return kylinpy.create_kylin(self.dsn)
else:
self.project = self.project if self.project else conn.schema
return kylinpy.Kylin(conn.host, username=conn.login,
password=conn.password, port=conn.port,
project=self.project, **conn.extra_dejson)
def cube_run(self, datasource_name, op, **op_args):
"""
        Run a CubeSource command that is in CubeSource.support_invoke_command.
        :param datasource_name: name of the cube datasource to operate on
:param op: command
:param op_args: command args
:return: response
"""
cube_source = self.get_conn().get_datasource(datasource_name)
try:
response = cube_source.invoke_command(op, **op_args)
return response
except exceptions.KylinError as err:
raise AirflowException("Cube operation {} error , Message: {}".format(op, err))
def get_job_status(self, job_id):
"""
get job status
:param job_id: kylin job id
:return: job status
"""
return self.get_conn().get_job(job_id).status
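# Illustrative usage sketch (added; not part of the original module). The
# connection id, project, cube name, command, and argument names below are
# assumptions for demonstration only -- consult kylinpy's CubeSource for the
# commands and arguments it actually supports:
#
#     hook = KylinHook(kylin_conn_id='kylin_default', project='learn_kylin')
#     response = hook.cube_run('kylin_sales_cube', 'build', start=0, end=1)
#     state = hook.get_job_status(response['uuid'])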
| [
"[email protected]"
] | |
c618f3a535441e5c8587f2f8d2c91d6c2a046dd8 | 113f8ae533a75e9f2fdc1728661af0f19c8460a6 | /books_app/books_app/settings.py | 8f53b3a945f604d8a773d85e73cdd69c268b132c | [] | no_license | PeterM358/Python-web-2021 | cf08beaa3330495afc53e640f4a2aaf0429049e9 | a3b7e1d1be0cc85675aaff646917d4f5b7f97b00 | refs/heads/master | 2023-07-09T15:09:08.868548 | 2021-07-24T13:49:22 | 2021-07-24T13:49:22 | 382,328,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,403 | py | """
Django settings for books_app project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-e05f*w&+x@+@w7-9g*7z!4^%7u+xmeb9uxz*j@!kz(e5max0c('
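# Hardening sketch (added; not part of the original file): in production the
# key would typically be read from the environment instead of source, e.g.
#   import os
#   SECRET_KEY = os.environ['DJANGO_SECRET_KEY']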
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'books_app.books',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'books_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [BASE_DIR / 'templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'books_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'books',
'USER': 'postgres',
'PASSWORD': 'asdf1234',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"estestveno"
] | estestveno |
d010fb79c796f34db9c3ccef04a23dd8ba9fc29a | 2a54e8d6ed124c64abb9e075cc5524bb859ba0fa | /.history/8-loops_20200406005828.py | 5e027ff5acfe70abba31bc7f2389a11006536d94 | [] | no_license | CaptainStorm21/Python-Foundation | 01b5fbaf7a913506518cf22e0339dd948e65cea1 | a385adeda74f43dd7fb2d99d326b0be23db25024 | refs/heads/master | 2021-05-23T01:29:18.885239 | 2020-04-23T19:18:06 | 2020-04-23T19:18:06 | 253,171,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | # A for loop is used for iterating over a sequence (that is either a list, a tuple, a dictionary, a set, or a string).
# Simple Loop
# people = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia']
# for person in people:
# print('Current person is: ', person)
# Break
# people1 = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia', 'Monique']
# for child in people1:
# if child == 'Anna':
# print('Current child is: ', child)
# break
# gamers = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia', 'Monique']
# for person in gamers:
# if person == 'Caty':
# continue
# print('Current gamer is: ', person)
# Range
# gamers = ['John', 'Mary', 'Anna', 'Margaret', 'Sylvia', 'Monique']
# for i in range (len(gamers)):
# print('Current gamer: ', gamers[i])
# for i in range (0, 10):
# print ('Number ', i)
# While loops execute a set of statements as long as a condition is true.
count = 0
while count < 10:
    print('Count is: ', count)
    count += 1
| [
"[email protected]"
] | |
f694103ad29e76cd74411c21fb687a6e63fcbdbf | 6bda8a6e44d09397ada6ed222800e16f071674bf | /src/numdifftools/profiletools.py | 4e6374add0e9bed8d01cf6a6f24116263cc71f59 | [
"BSD-3-Clause"
] | permissive | pbrod/numdifftools | 557af2ee288339737a9e005fb0485542c13e8891 | 4f62e51d4776cc6acbdfb6268482635a487b860c | refs/heads/master | 2023-07-20T19:26:53.241589 | 2022-11-14T13:39:42 | 2022-11-14T13:39:42 | 17,676,169 | 237 | 52 | BSD-3-Clause | 2023-07-05T15:21:37 | 2014-03-12T17:31:06 | Python | UTF-8 | Python | false | false | 5,763 | py | """
This module is based on: https://zapier.com/engineering/profiling-python-boss/
See also:
https://www.pythoncentral.io/measure-time-in-python-time-time-vs-time-clock/
"""
from __future__ import absolute_import, print_function
import inspect
import cProfile
from functools import wraps
from timeit import default_timer as timer
import warnings
try:
from line_profiler import LineProfiler
def _add_all_class_methods(profiler, cls, except_=''):
for k, v in inspect.getmembers(cls, inspect.ismethod):
if k != except_:
profiler.add_function(v)
def _add_function_or_classmethod(profiler, f, args):
if isinstance(f, str): # f is a method of the
cls = args[0] # class instance
profiler.add_function(getattr(cls, f))
else:
profiler.add_function(f)
def do_profile(follow=(), follow_all_methods=False):
"""
Decorator to profile a function or class method
It uses line_profiler to give detailed reports on time spent on each
line in the code.
Pros: has intuitive and finely detailed reports. Can follow
functions in third party libraries.
Cons:
has external dependency on line_profiler and is quite slow,
so don't use it for benchmarking.
Handy tip:
Just decorate your test function or class method and pass any
additional problem function(s) in the follow argument!
If any follow argument is a string, it is assumed that the string
        refers to a bound method of the class
See also
--------
do_cprofile, test_do_profile
"""
def inner(func):
def profiled_func(*args, **kwargs):
try:
profiler = LineProfiler()
profiler.add_function(func)
if follow_all_methods:
cls = args[0] # class instance
_add_all_class_methods(profiler, cls,
except_=func.__name__)
for f in follow:
_add_function_or_classmethod(profiler, f, args)
profiler.enable_by_count()
return func(*args, **kwargs)
finally:
profiler.print_stats()
return profiled_func
return inner
except ImportError as error:
LineProfiler = None
warnings.warn(str(error))
def do_profile(follow=(), follow_all_methods=False):
"Helpful if you accidentally leave in production!"
def inner(func):
def nothing(*args, **kwargs):
return func(*args, **kwargs)
return nothing
return inner
def timefun(fun):
""" Timing decorator
Timers require you to do some digging. Start wrapping a few of the higher level
functions and confirm where the bottleneck is, then drill down into that function,
repeating as you go. When you find the disproportionately slow bit of code, fix it,
and work your way back out confirming that it is fixed.
Handy tip: Don't forget the handy timeit module! It tends to be more useful for
benchmarking small pieces of code than for doing the actual investigation.
Timer Pros:
Easy to understand and implement. Also very simple to compare before and after fixes.
Works across many languages.
Timer Cons:
Sometimes a little too simplistic for extremely complex codebases, you might spend
more time placing and replacing boilerplate code than you will fixing the problem!
"""
@wraps(fun)
def measure_time(*args, **kwargs):
t1 = timer()
result = fun(*args, **kwargs)
t2 = timer()
print("@timefun:" + fun.__name__ + " took " + str(t2 - t1) + " seconds")
return result
return measure_time
class TimeWith():
"""
Timing context manager
"""
def __init__(self, name=''):
self.name = name
self.start = timer()
@property
def elapsed(self):
return timer() - self.start
def checkpoint(self, name=''):
print('{timer} {checkpoint} took {elapsed} seconds'.format(timer=self.name,
checkpoint=name,
elapsed=self.elapsed,
).strip())
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.checkpoint('finished')
def do_cprofile(func):
"""
Decorator to profile a function
It gives good numbers on various function calls but it omits a vital piece
of information: what is it about a function that makes it so slow?
However, it is a great start to basic profiling. Sometimes it can even
point you to the solution with very little fuss. I often use it as a
gut check to start the debugging process before I dig deeper into the
specific functions that are either slow are called way too often.
Pros:
No external dependencies and quite fast. Useful for quick high-level
checks.
Cons:
Rather limited information that usually requires deeper debugging; reports
are a bit unintuitive, especially for complex codebases.
See also
--------
do_profile, test_do_profile
"""
def profiled_func(*args, **kwargs):
profile = cProfile.Profile()
try:
profile.enable()
result = func(*args, **kwargs)
profile.disable()
return result
finally:
profile.print_stats()
return profiled_func
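if __name__ == '__main__':
    # Minimal demonstration (an added sketch, not part of the original
    # module): `_toy_workload` is a made-up function used only to exercise
    # the timefun decorator and the TimeWith context manager.
    @timefun
    def _toy_workload(n=100000):
        return sum(i * i for i in range(n))
    with TimeWith('demo') as t:
        _toy_workload()
        t.checkpoint('toy workload done')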
| [
"[email protected]"
] | |
14c7f9577956db004b7db590687e30e8fdba3192 | ad0e853db635edc578d58891b90f8e45a72a724f | /rllib/examples/inference_and_serving/policy_inference_after_training.py | 17f033847ec1c046e9d6f405d8517c6f099104ee | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | ericl/ray | 8c93fc713af3b753215d4fe6221278700936e2db | e9a1c6d814fb1a81033809f56695030d651388f5 | refs/heads/master | 2023-08-31T11:53:23.584855 | 2023-06-07T21:04:28 | 2023-06-07T21:04:28 | 91,077,004 | 2 | 4 | Apache-2.0 | 2023-01-11T17:19:10 | 2017-05-12T09:51:04 | Python | UTF-8 | Python | false | false | 3,804 | py | """
Example showing how you can use your trained policy for inference
(computing actions) in an environment.
Includes options for LSTM-based models (--use-lstm), attention-net models
(--use-attention), and plain (non-recurrent) models.
"""
import argparse
import gymnasium as gym
import os
import ray
from ray import air, tune
from ray.rllib.algorithms.algorithm import Algorithm
from ray.tune.registry import get_trainable_cls
parser = argparse.ArgumentParser()
parser.add_argument(
"--run", type=str, default="PPO", help="The RLlib-registered algorithm to use."
)
parser.add_argument("--num-cpus", type=int, default=0)
parser.add_argument(
"--framework",
choices=["tf", "tf2", "torch"],
default="torch",
help="The DL framework specifier.",
)
parser.add_argument("--eager-tracing", action="store_true")
parser.add_argument(
"--stop-iters",
type=int,
default=200,
help="Number of iterations to train before we do inference.",
)
parser.add_argument(
"--stop-timesteps",
type=int,
default=100000,
help="Number of timesteps to train before we do inference.",
)
parser.add_argument(
"--stop-reward",
type=float,
default=150.0,
help="Reward at which we stop training before we do inference.",
)
parser.add_argument(
"--explore-during-inference",
action="store_true",
help="Whether the trained policy should use exploration during action "
"inference.",
)
parser.add_argument(
"--num-episodes-during-inference",
type=int,
default=10,
help="Number of episodes to do inference over after training.",
)
if __name__ == "__main__":
args = parser.parse_args()
ray.init(num_cpus=args.num_cpus or None)
config = (
get_trainable_cls(args.run)
.get_default_config()
.environment("FrozenLake-v1")
# Run with tracing enabled for tf2?
.framework(args.framework, eager_tracing=args.eager_tracing)
# Use GPUs iff `RLLIB_NUM_GPUS` env var set to > 0.
.resources(num_gpus=int(os.environ.get("RLLIB_NUM_GPUS", "0")))
)
stop = {
"training_iteration": args.stop_iters,
"timesteps_total": args.stop_timesteps,
"episode_reward_mean": args.stop_reward,
}
print("Training policy until desired reward/timesteps/iterations. ...")
tuner = tune.Tuner(
args.run,
param_space=config.to_dict(),
run_config=air.RunConfig(
stop=stop,
verbose=2,
checkpoint_config=air.CheckpointConfig(
checkpoint_frequency=1, checkpoint_at_end=True
),
),
)
results = tuner.fit()
print("Training completed. Restoring new Trainer for action inference.")
# Get the last checkpoint from the above training run.
checkpoint = results.get_best_result().checkpoint
# Create new Algorithm and restore its state from the last checkpoint.
algo = Algorithm.from_checkpoint(checkpoint)
# Create the env to do inference in.
env = gym.make("FrozenLake-v1")
obs, info = env.reset()
num_episodes = 0
episode_reward = 0.0
while num_episodes < args.num_episodes_during_inference:
# Compute an action (`a`).
a = algo.compute_single_action(
observation=obs,
explore=args.explore_during_inference,
policy_id="default_policy", # <- default value
)
# Send the computed action `a` to the env.
obs, reward, done, truncated, _ = env.step(a)
episode_reward += reward
# Is the episode `done`? -> Reset.
if done:
print(f"Episode done: Total reward = {episode_reward}")
obs, info = env.reset()
num_episodes += 1
episode_reward = 0.0
algo.stop()
ray.shutdown()
| [
"[email protected]"
] | |
1ea7ec9cd6f0f33042d9eac704a7f47a193c0f13 | 8bcf973008b1d7549f59501a1667909848ea87dd | /Day0617/staff_info/bin/start.py | ff176549a916a65c76e64836aa50c52a7c6e5635 | [] | no_license | simplesmall/Python-FullStack | 74ffeb2119eecb7fcb21a136d01aaaf2bcc2c24c | 210844ef6443a5543d49a20dbec2db9a9b960230 | refs/heads/master | 2022-12-17T00:56:40.515335 | 2019-11-15T02:07:57 | 2019-11-15T02:07:57 | 221,816,447 | 0 | 1 | null | 2022-12-13T19:22:26 | 2019-11-15T01:10:55 | Python | UTF-8 | Python | false | false | 327 | py | import sys
import os
# print(sys.path)
#获取start.py的路径
#当前文件往上翻两层 staff_info
project_path = os.path.dirname(os.path.dirname(__file__))
sys.path.append(project_path) #把staff_info添加到sys.path中
print(project_path)
from core import main
if __name__ == '__main__':
main.home()
| [
"[email protected]"
] | |
e3ae23e183adf64fde585cc7af4664706cfcceab | eed9b3d099facd98b8a139681808997d60b4e19c | /decorator_opt_arg/decorators.py | 4442035bdc787580a9d4d98b7258dade8ef37179 | [] | no_license | pybites/blog_code | 1240a3393a3672681d97c369711be6c7415d8c10 | 902ebb87e5f7a407714d0e399833f0331a1b915d | refs/heads/master | 2022-12-10T19:50:57.718119 | 2020-08-08T17:13:15 | 2020-08-08T17:13:15 | 76,716,190 | 49 | 47 | null | 2022-11-22T01:54:20 | 2016-12-17T09:51:12 | HTML | UTF-8 | Python | false | false | 514 | py | from functools import wraps
import time
def sleep(seconds=None):
def real_decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
print('Sleeping for {} seconds'.format(seconds))
time.sleep(seconds if seconds else 1)
return func(*args, **kwargs)
return wrapper
return real_decorator
if __name__ == '__main__':
@sleep(1) # @sleep without arg fails
def hello():
print('hello world')
for _ in range(3):
hello()
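# Sketch of a possible fix (added; not part of the original file): support
# both bare @sleep and @sleep(n) by checking whether the decorator received
# the decorated function directly. `sleep_flexible` is a hypothetical name
# introduced here for illustration.
def sleep_flexible(arg=None):
    if callable(arg):          # used bare: @sleep_flexible
        return sleep()(arg)    # falls back to the 1-second default
    return sleep(seconds=arg)  # used with an argument: @sleep_flexible(2)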
| [
"[email protected]"
] | |
71ba4ee7dbdb38f9f5e41c9b92d886fda6729209 | 91c7de67e656fec2b9c32b64e1b6ae88083a0283 | /functional_tests/test_simple_list_creation.py | f5aee3c61fd7a18d274cbbaf40fa57f4feb504f4 | [] | no_license | pohily/TDD | e0a85c60c5ee2e7388323ffb00b7fe81372431c1 | 60d2a0f9debfcc22be54d85e981aee23f8113563 | refs/heads/master | 2022-05-04T20:07:46.296627 | 2019-07-24T11:57:19 | 2019-07-24T11:57:19 | 189,567,223 | 0 | 0 | null | 2022-04-22T21:23:44 | 2019-05-31T09:28:16 | JavaScript | UTF-8 | Python | false | false | 3,307 | py | from .base import FunctionalTest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class NewVisitorTest(FunctionalTest):
def test_can_start_a_list_for_one_user(self):
# Edith has heard about a cool new online to-do app. She goes
# to check out its homepage
self.browser.get(self.live_server_url)
# She notices the page title and header mention to-do lists
self.assertIn('To-Do', self.browser.title)
header_text = self.browser.find_element_by_tag_name('h1').text
self.assertIn('To-Do', header_text)
# She is invited to enter a to-do item straight away
inputbox = self.get_item_input_box()
self.assertEqual(
inputbox.get_attribute('placeholder'),
'Enter a to-do item'
)
# She types "Buy peacock feathers" into a text box (Edith's hobby
# is tying fly-fishing lures)
inputbox.send_keys('Buy peacock feathers')
# When she hits enter, the page updates, and now the page lists
# "1: Buy peacock feathers" as an item in a to-do list table
inputbox.send_keys(Keys.ENTER)
self.wait_for_row_in_list_table('1: Buy peacock feathers')
# There is still a text box inviting her to add another item. She
# enters "Use peacock feathers to make a fly" (Edith is very
# methodical)
self.add_list_item('Use peacock feathers to make a fly')
# The page updates again, and now shows both items on her list
self.wait_for_row_in_list_table('2: Use peacock feathers to make a fly')
self.wait_for_row_in_list_table('1: Buy peacock feathers')
# Satisfied, she goes back to sleep
def test_multiple_users_can_start_lists_at_different_urls(self):
# Edith starts a new to-do list
self.browser.get(self.live_server_url)
self.add_list_item('Buy peacock feathers')
# She notices that her list has a unique URL
edith_list_url = self.browser.current_url
self.assertRegex(edith_list_url, '/lists/.+')
# Now a new user, Francis, comes along to the site.
## We use a new browser session to make sure that no information
## of Edith's is coming through from cookies etc
self.browser.quit()
self.browser = webdriver.Firefox()
# Francis visits the home page. There is no sign of Edith's
# list
self.browser.get(self.live_server_url)
page_text = self.browser.find_element_by_tag_name('body').text
self.assertNotIn('Buy peacock feathers', page_text)
self.assertNotIn('make a fly', page_text)
# Francis starts a new list by entering a new item. He
# is less interesting than Edith...
self.add_list_item('Buy milk')
# Francis gets his own unique URL
francis_list_url = self.browser.current_url
self.assertRegex(francis_list_url, '/lists/.+')
self.assertNotEqual(francis_list_url, edith_list_url)
# Again, there is no trace of Edith's list
page_text = self.browser.find_element_by_tag_name('body').text
self.assertNotIn('Buy peacock feathers', page_text)
self.assertIn('Buy milk', page_text)
# Satisfied, they both go back to sleep
| [
"[email protected]"
] | |
713275915abef8843f8041d6f606da3ed88339b9 | f77593e9e9a112e85acd3c73c056a7466d76e15e | /request_delivery_installation/request_delivery_installation/urls.py | d15cc80688686b4ea06f1692684c43314ce8d0e5 | [] | no_license | geethusuresh/reqest_installation | bf47c915aee1e1f7730ea858c000a6dd434a79fb | d047fa9f303273915651d0cbe03b7795f157f31c | refs/heads/master | 2021-01-25T04:09:10.282831 | 2014-09-28T06:40:10 | 2014-09-28T06:40:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,498 | py | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib import admin
admin.autodiscover()
from web.views import *
urlpatterns = patterns('',
url(r'^$', login_required(Home.as_view()), name='home'),
url(r'^accounts/login/$', Login.as_view(), name='login'),
url(r'^logout/$', Logout.as_view(), name='logout'),
url(r'^register/$', login_required(Signup.as_view()), name='register'),
url(r'^dealer/(?P<user_id>[\d+]+)/add/subdealer/$',login_required(AddSubDealer.as_view()), name="add_subdealer"),
url(r'^add_purchase_info/$', login_required(AddPurchanseInfo.as_view()), name='add_purchase_info'),
url(r'^fetch_brand_names/$', FetchBrandNames.as_view(), name='fetch_brand_names'),
url(r'^fetch_purchase_sales_men/$', FetchPurchaseSalesManList.as_view(), name='fetch_purchase_sales_men'),
url(r'^fetch_dealers/$', FetchDealersList.as_view(), name='fetch_dealers'),
url(r'^purchase_info/(?P<purchase_info_id>[\d+]+)/$', login_required(PurchaseInfoView.as_view()), name='purchase_info'),
url(r'^search_purchase_info/(?P<delivery_order_number>[\w-]+)/$', login_required(SearchPurchaseInfo.as_view()), name="search_purchase_info"),
url(r'^fetch_dealer_company_names/$', FetchFirmNames.as_view(), name='fetch_firm_names'),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
| [
"[email protected]"
] | |
fe0088ebf9df88a75d965d942d219349f3a68477 | 70bfe6d30059ea78f3e12921e1875773782453f6 | /tencentcloud/vod/v20180717/errorcodes.py | 2d03545d2145754adfdcd1419abaf7232659bc47 | [
"Apache-2.0"
] | permissive | sangliangliang/tencentcloud-sdk-python | d28e308df871fc6a94d3afb59f3365a6cc865f1c | 82c7fc4da7f5131688fc01dc90d4465b7b3b41a2 | refs/heads/master | 2023-08-03T21:44:05.595225 | 2021-09-13T01:10:52 | 2021-09-13T01:10:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,847 | py | # -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# CAM signature/authentication error.
AUTHFAILURE = 'AuthFailure'
# Operation failed.
FAILEDOPERATION = 'FailedOperation'
# Operation failed: the maximum number of category levels was exceeded.
FAILEDOPERATION_CLASSLEVELLIMITEXCEEDED = 'FailedOperation.ClassLevelLimitExceeded'
# Operation failed: duplicate category name.
FAILEDOPERATION_CLASSNAMEDUPLICATE = 'FailedOperation.ClassNameDuplicate'
# Operation failed: the category does not exist.
FAILEDOPERATION_CLASSNOFOUND = 'FailedOperation.ClassNoFound'
# Operation failed: unsupported cover type.
FAILEDOPERATION_COVERTYPE = 'FailedOperation.CoverType'
# Abnormal user account.
FAILEDOPERATION_INVALIDACCOUNT = 'FailedOperation.InvalidAccount'
# The VOD service has not been activated.
FAILEDOPERATION_INVALIDVODUSER = 'FailedOperation.InvalidVodUser'
# The media file has been blocked by the system.
FAILEDOPERATION_MEDIAFORBIDEDBYSYSTEM = 'FailedOperation.MediaForbidedBySystem'
# Operation failed: unsupported media type.
FAILEDOPERATION_MEDIATYPE = 'FailedOperation.MediaType'
# Network error.
FAILEDOPERATION_NETWORKERROR = 'FailedOperation.NetWorkError'
# No permission to use this API.
FAILEDOPERATION_NOPRIVILEGES = 'FailedOperation.NoPrivileges'
# Operation failed: the parent category ID does not exist.
FAILEDOPERATION_PARENTIDNOFOUND = 'FailedOperation.ParentIdNoFound'
# Operation failed: the number of subcategories exceeds the limit.
FAILEDOPERATION_SUBCLASSLIMITEXCEEDED = 'FailedOperation.SubclassLimitExceeded'
# Operation failed: duplicate task.
FAILEDOPERATION_TASKDUPLICATE = 'FailedOperation.TaskDuplicate'
# Operation failed: failed to upload the file to COS.
FAILEDOPERATION_UPLOADCOSFAIL = 'FailedOperation.UploadCosFail'
# Internal error.
INTERNALERROR = 'InternalError'
# Internal error: failed to access the DB.
INTERNALERROR_DBERROR = 'InternalError.DBError'
# Internal error: failed to generate the template ID.
INTERNALERROR_GENDEFINITION = 'InternalError.GenDefinition'
# Internal error: failed to get media file information.
INTERNALERROR_GETFILEINFOERROR = 'InternalError.GetFileInfoError'
# Internal error: failed to get the media list.
INTERNALERROR_GETMEDIALISTERROR = 'InternalError.GetMediaListError'
# Time parsing error.
INTERNALERROR_TIMEPARSEERROR = 'InternalError.TimeParseError'
# Internal error: failed to update media file information.
INTERNALERROR_UPDATEMEDIAERROR = 'InternalError.UpdateMediaError'
# Internal error: failed to upload the cover image.
INTERNALERROR_UPLOADCOVERIMAGEERROR = 'InternalError.UploadCoverImageError'
# Internal error: failed to upload the watermark image.
INTERNALERROR_UPLOADWATERMARKERROR = 'InternalError.UploadWatermarkError'
# Invalid parameter.
INVALIDPARAMETER = 'InvalidParameter'
# The task flow template name already exists.
INVALIDPARAMETER_EXISTEDPROCEDURENAME = 'InvalidParameter.ExistedProcedureName'
# Invalid parameter value: expiration time.
INVALIDPARAMETER_EXPIRETIME = 'InvalidParameter.ExpireTime'
# The task flow template name does not exist.
INVALIDPARAMETER_PROCEDURENAMENOTEXIST = 'InvalidParameter.ProcedureNameNotExist'
# Invalid parameter value: storage region.
INVALIDPARAMETER_STORAGEREGION = 'InvalidParameter.StorageRegion'
# Invalid parameter value.
INVALIDPARAMETERVALUE = 'InvalidParameterValue'
# Invalid parameter value: the AddKeyFrameDescs and ClearKeyFrameDescs parameters conflict.
INVALIDPARAMETERVALUE_ADDKEYFRAMEDESCSANDCLEARKEYFRAMEDESCSCONFLICT = 'InvalidParameterValue.AddKeyFrameDescsAndClearKeyFrameDescsConflict'
# Invalid parameter value: the AddKeyFrameDescs and DeleteKeyFrameDescs parameters conflict.
INVALIDPARAMETERVALUE_ADDKEYFRAMEDESCSANDDELETEKEYFRAMEDESCSCONFLICT = 'InvalidParameterValue.AddKeyFrameDescsAndDeleteKeyFrameDescsConflict'
# Invalid parameter value: the AddTags and ClearTags parameters conflict.
INVALIDPARAMETERVALUE_ADDTAGSANDCLEARTAGSCONFLICT = 'InvalidParameterValue.AddTagsAndClearTagsConflict'
# Invalid parameter value: the AddTags and DeleteTags parameters conflict.
INVALIDPARAMETERVALUE_ADDTAGSANDDELETETAGSCONFLICT = 'InvalidParameterValue.AddTagsAndDeleteTagsConflict'
# Invalid parameter value: AI analysis Definition.
INVALIDPARAMETERVALUE_AIANALYSISTASKDEFINITION = 'InvalidParameterValue.AiAnalysisTaskDefinition'
# Invalid parameter value: AI content review Definition.
INVALIDPARAMETERVALUE_AICONTENTREVIEWTASKDEFINITION = 'InvalidParameterValue.AiContentReviewTaskDefinition'
# Invalid parameter value: AI recognition Definition.
INVALIDPARAMETERVALUE_AIRECOGNITIONTASKDEFINITION = 'InvalidParameterValue.AiRecognitionTaskDefinition'
# Invalid Area parameter.
INVALIDPARAMETERVALUE_AREA = 'InvalidParameterValue.Area'
# Invalid parameter: audio stream bitrate.
INVALIDPARAMETERVALUE_AUDIOBITRATE = 'InvalidParameterValue.AudioBitrate'
# Invalid parameter value: AudioChannel.
INVALIDPARAMETERVALUE_AUDIOCHANNEL = 'InvalidParameterValue.AudioChannel'
# Invalid parameter: audio stream codec.
INVALIDPARAMETERVALUE_AUDIOCODEC = 'InvalidParameterValue.AudioCodec'
# Invalid parameter: audio stream sample rate.
INVALIDPARAMETERVALUE_AUDIOSAMPLERATE = 'InvalidParameterValue.AudioSampleRate'
# Invalid audio/video bitrate.
INVALIDPARAMETERVALUE_BITRATE = 'InvalidParameterValue.Bitrate'
# Invalid parameter value: illegal BlockConfidence value.
INVALIDPARAMETERVALUE_BLOCKCONFIDENCE = 'InvalidParameterValue.BlockConfidence'
# Invalid file type.
INVALIDPARAMETERVALUE_CATEGORIES = 'InvalidParameterValue.Categories'
# Invalid parameter value: category ID.
INVALIDPARAMETERVALUE_CLASSID = 'InvalidParameterValue.ClassId'
# Invalid parameter value: invalid ClassIds.
INVALIDPARAMETERVALUE_CLASSIDS = 'InvalidParameterValue.ClassIds'
# Invalid parameter value: invalid ClassName.
INVALIDPARAMETERVALUE_CLASSNAME = 'InvalidParameterValue.ClassName'
# Invalid parameter in the intelligent categorization control field.
INVALIDPARAMETERVALUE_CLASSIFCATIONCONFIGURE = 'InvalidParameterValue.ClassifcationConfigure'
# Invalid parameter value: the clipped time span is too long.
INVALIDPARAMETERVALUE_CLIPDURATION = 'InvalidParameterValue.ClipDuration'
# Invalid audio/video codec.
INVALIDPARAMETERVALUE_CODEC = 'InvalidParameterValue.Codec'
# Invalid parameter value: ColumnCount.
INVALIDPARAMETERVALUE_COLUMNCOUNT = 'InvalidParameterValue.ColumnCount'
# Invalid parameter: template description.
INVALIDPARAMETERVALUE_COMMENT = 'InvalidParameterValue.Comment'
# Invalid parameter: container format.
INVALIDPARAMETERVALUE_CONTAINER = 'InvalidParameterValue.Container'
# Invalid parameter value: ContainerType.
INVALIDPARAMETERVALUE_CONTAINERTYPE = 'InvalidParameterValue.ContainerType'
# Invalid parameter value: CoordinateOrigin.
INVALIDPARAMETERVALUE_COORDINATEORIGIN = 'InvalidParameterValue.CoordinateOrigin'
# Invalid parameter in the intelligent cover control field.
INVALIDPARAMETERVALUE_COVERCONFIGURE = 'InvalidParameterValue.CoverConfigure'
# Invalid parameter value: cover type.
INVALIDPARAMETERVALUE_COVERTYPE = 'InvalidParameterValue.CoverType'
# Invalid parameter value: cover URL.
INVALIDPARAMETERVALUE_COVERURL = 'InvalidParameterValue.CoverUrl'
# Invalid parameter value: illegal CutAndCrops value.
INVALIDPARAMETERVALUE_CUTANDCROPS = 'InvalidParameterValue.CutAndCrops'
# Invalid parameter value: time granularity.
INVALIDPARAMETERVALUE_DATAINTERVAL = 'InvalidParameterValue.DataInterval'
# Invalid parameter value: data type.
INVALIDPARAMETERVALUE_DATATYPE = 'InvalidParameterValue.DataType'
# Invalid parameter value: Date.
INVALIDPARAMETERVALUE_DATE = 'InvalidParameterValue.Date'
# Invalid parameter value: illegal filter label for the default face library.
INVALIDPARAMETERVALUE_DEFAULTLIBRARYLABELSET = 'InvalidParameterValue.DefaultLibraryLabelSet'
# Invalid parameter: Definition.
INVALIDPARAMETERVALUE_DEFINITION = 'InvalidParameterValue.Definition'
# Invalid parameter: Definitions.
INVALIDPARAMETERVALUE_DEFINITIONS = 'InvalidParameterValue.Definitions'
# Invalid parameter value: deleting the default template is not allowed.
INVALIDPARAMETERVALUE_DELETEDEFAULTTEMPLATE = 'InvalidParameterValue.DeleteDefaultTemplate'
# Invalid parameter value: Description exceeds the length limit.
INVALIDPARAMETERVALUE_DESCRIPTION = 'InvalidParameterValue.Description'
# Invalid value of the switch for disabling transcoding from low bitrate to high bitrate.
INVALIDPARAMETERVALUE_DISABLEHIGHERVIDEOBITRATE = 'InvalidParameterValue.DisableHigherVideoBitrate'
# Invalid value of the switch for disabling transcoding from low resolution to high resolution.
INVALIDPARAMETERVALUE_DISABLEHIGHERVIDEORESOLUTION = 'InvalidParameterValue.DisableHigherVideoResolution'
# Invalid Districts parameter value.
INVALIDPARAMETERVALUE_DISTRICTS = 'InvalidParameterValue.Districts'
# Invalid parameter: the domain name does not exist.
INVALIDPARAMETERVALUE_DOMAINNAME = 'InvalidParameterValue.DomainName'
# Invalid parameter value: the domain name list is too large.
INVALIDPARAMETERVALUE_DOMAINNAMES = 'InvalidParameterValue.DomainNames'
# Invalid DRM type.
INVALIDPARAMETERVALUE_DRMTYPE = 'InvalidParameterValue.DrmType'
# Invalid parameter value: invalid EndDate.
INVALIDPARAMETERVALUE_ENDDATE = 'InvalidParameterValue.EndDate'
# Invalid parameter value: invalid EndTime.
INVALIDPARAMETERVALUE_ENDTIME = 'InvalidParameterValue.EndTime'
# Invalid parameter: invalid end time.
INVALIDPARAMETERVALUE_ENDTIMEOFFSET = 'InvalidParameterValue.EndTimeOffset'
# Invalid parameter value: malformed ExpireTime.
INVALIDPARAMETERVALUE_EXPIRETIME = 'InvalidParameterValue.ExpireTime'
# Invalid parameter value: duplicate face.
INVALIDPARAMETERVALUE_FACEDUPLICATE = 'InvalidParameterValue.FaceDuplicate'
# Invalid parameter value: illegal face library parameter.
INVALIDPARAMETERVALUE_FACELIBRARY = 'InvalidParameterValue.FaceLibrary'
# Invalid parameter value: illegal face score value.
INVALIDPARAMETERVALUE_FACESCORE = 'InvalidParameterValue.FaceScore'
# The FileId does not exist.
INVALIDPARAMETERVALUE_FILEID = 'InvalidParameterValue.FileId'
# Invalid FileIds parameter.
INVALIDPARAMETERVALUE_FILEIDS = 'InvalidParameterValue.FileIds'
# The FileIds array is empty.
INVALIDPARAMETERVALUE_FILEIDSEMPTY = 'InvalidParameterValue.FileIdsEmpty'
# Invalid parameter value: too many FileIds.
INVALIDPARAMETERVALUE_FILEIDSTOOMANY = 'InvalidParameterValue.FileIdsTooMany'
# Invalid video type.
INVALIDPARAMETERVALUE_FILETYPE = 'InvalidParameterValue.FileType'
# Invalid parameter: invalid fill type.
INVALIDPARAMETERVALUE_FILLTYPE = 'InvalidParameterValue.FillType'
# Invalid parameter: whether to remove the video; the value should be 0 or 1.
INVALIDPARAMETERVALUE_FILTRATEAUDIO = 'InvalidParameterValue.FiltrateAudio'
# Invalid parameter: remove video.
INVALIDPARAMETERVALUE_FILTRATEVIDEO = 'InvalidParameterValue.FiltrateVideo'
# Invalid parameter value: Format.
INVALIDPARAMETERVALUE_FORMAT = 'InvalidParameterValue.Format'
# Invalid parameter value: when Format is webp, Width and Height are both empty.
INVALIDPARAMETERVALUE_FORMATWEBPLACKWIDTHANDHEIGHT = 'InvalidParameterValue.FormatWebpLackWidthAndHeight'
# Invalid parameter value: when Format is webp, Width and Height cannot both be 0.
INVALIDPARAMETERVALUE_FORMATWEBPWIDTHANDHEIGHTBOTHZERO = 'InvalidParameterValue.FormatWebpWidthAndHeightBothZero'
# Invalid parameter: video frame rate.
INVALIDPARAMETERVALUE_FPS = 'InvalidParameterValue.Fps'
# Invalid parameter in the intelligent frame-level tagging control field.
INVALIDPARAMETERVALUE_FRAMETAGCONFIGURE = 'InvalidParameterValue.FrameTagConfigure'
# Invalid parameter value: FunctionArg.
INVALIDPARAMETERVALUE_FUNCTIONARG = 'InvalidParameterValue.FunctionArg'
# Invalid parameter value: FunctionName.
INVALIDPARAMETERVALUE_FUNCTIONNAME = 'InvalidParameterValue.FunctionName'
# Invalid parameter: height.
INVALIDPARAMETERVALUE_HEIGHT = 'InvalidParameterValue.Height'
# Invalid parameter in the intelligent highlights control field.
INVALIDPARAMETERVALUE_HIGHLIGHTCONFIGURE = 'InvalidParameterValue.HighlightConfigure'
# Invalid ImageContent parameter value.
INVALIDPARAMETERVALUE_IMAGECONTENT = 'InvalidParameterValue.ImageContent'
# Failed to Base64-decode the image.
INVALIDPARAMETERVALUE_IMAGEDECODEERROR = 'InvalidParameterValue.ImageDecodeError'
# Invalid parameter: image watermark template.
INVALIDPARAMETERVALUE_IMAGETEMPLATE = 'InvalidParameterValue.ImageTemplate'
# Invalid parameter: invalid operation type.
INVALIDPARAMETERVALUE_INVALIDOPERATIONTYPE = 'InvalidParameterValue.InvalidOperationType'
# Invalid Isps parameter.
INVALIDPARAMETERVALUE_ISPS = 'InvalidParameterValue.Isps'
# Invalid parameter value: the key frame description content is too long.
INVALIDPARAMETERVALUE_KEYFRAMEDESCCONTENTTOOLONG = 'InvalidParameterValue.KeyFrameDescContentTooLong'
# Invalid parameter value: illegal LabelSet value.
INVALIDPARAMETERVALUE_LABELSET = 'InvalidParameterValue.LabelSet'
# Invalid parameter: Limit.
INVALIDPARAMETERVALUE_LIMIT = 'InvalidParameterValue.Limit'
# Invalid parameter value: Limit is too large.
INVALIDPARAMETERVALUE_LIMITTOOLARGE = 'InvalidParameterValue.LimitTooLarge'
# Invalid parameter value: MediaManifestContent.
INVALIDPARAMETERVALUE_MEDIAMANIFESTCONTENT = 'InvalidParameterValue.MediaManifestContent'
# Invalid parameter value: media type.
INVALIDPARAMETERVALUE_MEDIATYPE = 'InvalidParameterValue.MediaType'
# Invalid parameter value: media file URL.
INVALIDPARAMETERVALUE_MEDIAURL = 'InvalidParameterValue.MediaUrl'
# Invalid Metric parameter.
INVALIDPARAMETERVALUE_METRIC = 'InvalidParameterValue.Metric'
# Invalid parameter value: modifying the default template is not allowed.
INVALIDPARAMETERVALUE_MODIFYDEFAULTTEMPLATE = 'InvalidParameterValue.ModifyDefaultTemplate'
# Invalid parameter value: Name exceeds the length limit.
INVALIDPARAMETERVALUE_NAME = 'InvalidParameterValue.Name'
# Invalid filename prefix.
INVALIDPARAMETERVALUE_NAMEPREFIXES = 'InvalidParameterValue.NamePrefixes'
# Too many elements in the Names array.
INVALIDPARAMETERVALUE_NAMES = 'InvalidParameterValue.Names'
# Invalid parameter value: illegal object library parameter.
INVALIDPARAMETERVALUE_OBJECTLIBRARY = 'InvalidParameterValue.ObjectLibrary'
# Invalid parameter value: invalid Offset.
INVALIDPARAMETERVALUE_OFFSET = 'InvalidParameterValue.Offset'
# Invalid parameter value: Offset is too large.
INVALIDPARAMETERVALUE_OFFSETTOOLARGE = 'InvalidParameterValue.OffsetTooLarge'
# Invalid parameter value: invalid Operation.
INVALIDPARAMETERVALUE_OPERATION = 'InvalidParameterValue.Operation'
# Invalid parameter value: invalid ParentId.
INVALIDPARAMETERVALUE_PARENTID = 'InvalidParameterValue.ParentId'
# Invalid parameter value: invalid face image format.
INVALIDPARAMETERVALUE_PICFORMATERROR = 'InvalidParameterValue.PicFormatError'
# Invalid task flow template name.
INVALIDPARAMETERVALUE_PROCEDURENAME = 'InvalidParameterValue.ProcedureName'
# Invalid parameter value: Quality.
INVALIDPARAMETERVALUE_QUALITY = 'InvalidParameterValue.Quality'
# Invalid parameter value: RemoveAudio.
INVALIDPARAMETERVALUE_REMOVEAUDIO = 'InvalidParameterValue.RemoveAudio'
# Invalid parameter value: RemoveVideo.
INVALIDPARAMETERVALUE_REMOVEVIDEO = 'InvalidParameterValue.RemoveVideo'
# Invalid parameter: invalid RepeatType.
INVALIDPARAMETERVALUE_REPEATTYPE = 'InvalidParameterValue.RepeatType'
# Invalid parameter: invalid resolution.
INVALIDPARAMETERVALUE_RESOLUTION = 'InvalidParameterValue.Resolution'
# Invalid ResolutionAdaptive.
INVALIDPARAMETERVALUE_RESOLUTIONADAPTIVE = 'InvalidParameterValue.ResolutionAdaptive'
# Invalid parameter value: illegal ReviewConfidence value.
INVALIDPARAMETERVALUE_REVIEWCONFIDENCE = 'InvalidParameterValue.ReviewConfidence'
# Invalid parameter value: illegal ReviewWallSwitch value.
INVALIDPARAMETERVALUE_REVIEWWALLSWITCH = 'InvalidParameterValue.ReviewWallSwitch'
# Invalid parameter value: RowCount.
INVALIDPARAMETERVALUE_ROWCOUNT = 'InvalidParameterValue.RowCount'
# Invalid parameter value: SampleInterval.
INVALIDPARAMETERVALUE_SAMPLEINTERVAL = 'InvalidParameterValue.SampleInterval'
# Invalid audio sample rate.
INVALIDPARAMETERVALUE_SAMPLERATE = 'InvalidParameterValue.SampleRate'
# Invalid parameter value: SampleType.
INVALIDPARAMETERVALUE_SAMPLETYPE = 'InvalidParameterValue.SampleType'
# Invalid parameter value: illegal ScreenshotInterval value.
INVALIDPARAMETERVALUE_SCREENSHOTINTERVAL = 'InvalidParameterValue.ScreenshotInterval'
# SessionContext is too long.
INVALIDPARAMETERVALUE_SESSIONCONTEXTTOOLONG = 'InvalidParameterValue.SessionContextTooLong'
# Duplicate deduplication ID; the request was deduplicated.
INVALIDPARAMETERVALUE_SESSIONID = 'InvalidParameterValue.SessionId'
# SessionId is too long.
INVALIDPARAMETERVALUE_SESSIONIDTOOLONG = 'InvalidParameterValue.SessionIdTooLong'
# Invalid parameter value: invalid Sort.
INVALIDPARAMETERVALUE_SORT = 'InvalidParameterValue.Sort'
# Invalid parameter: audio channel system.
INVALIDPARAMETERVALUE_SOUNDSYSTEM = 'InvalidParameterValue.SoundSystem'
# Invalid SourceDefinition; check whether the media file has the corresponding transcode.
INVALIDPARAMETERVALUE_SOURCEDEFINITION = 'InvalidParameterValue.SourceDefinition'
# Invalid parameter value: invalid SourceType.
INVALIDPARAMETERVALUE_SOURCETYPE = 'InvalidParameterValue.SourceType'
# Unknown media file source.
INVALIDPARAMETERVALUE_SOURCETYPES = 'InvalidParameterValue.SourceTypes'
# Invalid parameter value: invalid StartDate.
INVALIDPARAMETERVALUE_STARTDATE = 'InvalidParameterValue.StartDate'
# Invalid parameter value: invalid StartTime.
INVALIDPARAMETERVALUE_STARTTIME = 'InvalidParameterValue.StartTime'
# Invalid parameter: invalid start time.
INVALIDPARAMETERVALUE_STARTTIMEOFFSET = 'InvalidParameterValue.StartTimeOffset'
# Invalid parameter value: illegal manual confirmation result value.
INVALIDPARAMETERVALUE_STATUS = 'InvalidParameterValue.Status'
# Invalid parameter value: storage region.
INVALIDPARAMETERVALUE_STORAGEREGION = 'InvalidParameterValue.StorageRegion'
# Invalid parameter value: invalid StorageRegions.
INVALIDPARAMETERVALUE_STORAGEREGIONS = 'InvalidParameterValue.StorageRegions'
# Invalid parameter value: StorageType.
INVALIDPARAMETERVALUE_STORAGETYPE = 'InvalidParameterValue.StorageType'
# Invalid parameter value: invalid StreamId.
INVALIDPARAMETERVALUE_STREAMIDINVALID = 'InvalidParameterValue.StreamIdInvalid'
# Invalid stream ID parameter.
INVALIDPARAMETERVALUE_STREAMIDS = 'InvalidParameterValue.StreamIds'
# Invalid parameter value: subapplication ID.
INVALIDPARAMETERVALUE_SUBAPPID = 'InvalidParameterValue.SubAppId'
# Invalid parameter value: illegal SubtitleFormat parameter.
INVALIDPARAMETERVALUE_SUBTITLEFORMAT = 'InvalidParameterValue.SubtitleFormat'
# Invalid parameter value: the SVG is empty.
INVALIDPARAMETERVALUE_SVGTEMPLATE = 'InvalidParameterValue.SvgTemplate'
# Invalid parameter value: SVG height.
INVALIDPARAMETERVALUE_SVGTEMPLATEHEIGHT = 'InvalidParameterValue.SvgTemplateHeight'
# Invalid parameter value: SVG width.
INVALIDPARAMETERVALUE_SVGTEMPLATEWIDTH = 'InvalidParameterValue.SvgTemplateWidth'
# Invalid parameter value: illegal Switch value.
INVALIDPARAMETERVALUE_SWITCH = 'InvalidParameterValue.Switch'
# Invalid parameter value: invalid TEHD Type.
INVALIDPARAMETERVALUE_TEHDTYPE = 'InvalidParameterValue.TEHDType'
# Invalid parameter in the intelligent tagging control field.
INVALIDPARAMETERVALUE_TAGCONFIGURE = 'InvalidParameterValue.TagConfigure'
# Invalid parameter value: the tag is too long.
INVALIDPARAMETERVALUE_TAGTOOLONG = 'InvalidParameterValue.TagTooLong'
# Invalid parameter value: invalid Tags.
INVALIDPARAMETERVALUE_TAGS = 'InvalidParameterValue.Tags'
# The task ID does not exist.
INVALIDPARAMETERVALUE_TASKID = 'InvalidParameterValue.TaskId'
# Invalid parameter value: search text.
INVALIDPARAMETERVALUE_TEXT = 'InvalidParameterValue.Text'
# Invalid parameter: text transparency.
INVALIDPARAMETERVALUE_TEXTALPHA = 'InvalidParameterValue.TextAlpha'
# Invalid parameter: text template.
INVALIDPARAMETERVALUE_TEXTTEMPLATE = 'InvalidParameterValue.TextTemplate'
# Invalid parameter value: illegal Thumbnail value.
INVALIDPARAMETERVALUE_THUMBNAILS = 'InvalidParameterValue.Thumbnails'
# Invalid parameter value: TimeType.
INVALIDPARAMETERVALUE_TIMETYPE = 'InvalidParameterValue.TimeType'
# Invalid Type parameter value.
INVALIDPARAMETERVALUE_TYPE = 'InvalidParameterValue.Type'
# Invalid Types parameter.
INVALIDPARAMETERVALUE_TYPES = 'InvalidParameterValue.Types'
# The deduplication ID was repeated within one day; the request was deduplicated.
INVALIDPARAMETERVALUE_UNIQUEIDENTIFIER = 'InvalidParameterValue.UniqueIdentifier'
# Invalid parameter: invalid Url.
INVALIDPARAMETERVALUE_URL = 'InvalidParameterValue.Url'
# Invalid parameter value: illegal filter label for the user-defined face library.
INVALIDPARAMETERVALUE_USERDEFINELIBRARYLABELSET = 'InvalidParameterValue.UserDefineLibraryLabelSet'
# Invalid parameter: vcrf.
INVALIDPARAMETERVALUE_VCRF = 'InvalidParameterValue.Vcrf'
# Invalid parameter: video stream bitrate.
INVALIDPARAMETERVALUE_VIDEOBITRATE = 'InvalidParameterValue.VideoBitrate'
# Invalid parameter: video stream codec.
INVALIDPARAMETERVALUE_VIDEOCODEC = 'InvalidParameterValue.VideoCodec'
# Invalid Vids parameter.
INVALIDPARAMETERVALUE_VIDS = 'InvalidParameterValue.Vids'
# Invalid parameter value: VOD session.
INVALIDPARAMETERVALUE_VODSESSIONKEY = 'InvalidParameterValue.VodSessionKey'
# Invalid parameter value: illegal Watermarks value.
INVALIDPARAMETERVALUE_WATERMARKS = 'InvalidParameterValue.Watermarks'
# Invalid parameter: width.
INVALIDPARAMETERVALUE_WIDTH = 'InvalidParameterValue.Width'
# Horizontal position of the watermark origin relative to the video image coordinate origin; both % and px formats are supported.
INVALIDPARAMETERVALUE_XPOS = 'InvalidParameterValue.XPos'
# Vertical position of the watermark origin relative to the video image coordinate origin; both % and px formats are supported.
INVALIDPARAMETERVALUE_YPOS = 'InvalidParameterValue.YPos'
# Quota limit exceeded.
LIMITEXCEEDED = 'LimitExceeded'
# Limit exceeded: the total number of old and new key frame descriptions exceeds the limit.
LIMITEXCEEDED_KEYFRAMEDESCCOUNTREACHMAX = 'LimitExceeded.KeyFrameDescCountReachMax'
# Limit exceeded: the total number of old and new tags exceeds the limit.
LIMITEXCEEDED_TAGCOUNTREACHMAX = 'LimitExceeded.TagCountReachMax'
# Limit exceeded: the number of templates exceeds the limit.
LIMITEXCEEDED_TOOMUCHTEMPLATE = 'LimitExceeded.TooMuchTemplate'
# The resource does not exist.
RESOURCENOTFOUND = 'ResourceNotFound'
# The resource does not exist: the cover does not exist.
RESOURCENOTFOUND_COVERURL = 'ResourceNotFound.CoverUrl'
# The resource does not exist: the file does not exist.
RESOURCENOTFOUND_FILENOTEXIST = 'ResourceNotFound.FileNotExist'
# The resource does not exist: person.
RESOURCENOTFOUND_PERSON = 'ResourceNotFound.Person'
# The resource does not exist: the template does not exist.
RESOURCENOTFOUND_TEMPLATENOTEXIST = 'ResourceNotFound.TemplateNotExist'
# The user does not exist.
RESOURCENOTFOUND_USERNOTEXIST = 'ResourceNotFound.UserNotExist'
# The resource does not exist: keyword.
RESOURCENOTFOUND_WORD = 'ResourceNotFound.Word'
# Invalid parameter: M3U8 with a MasterPlaylist is not supported.
RESOURCEUNAVAILABLE_MASTERPLAYLIST = 'ResourceUnavailable.MasterPlaylist'
# Unauthorized operation.
UNAUTHORIZEDOPERATION = 'UnauthorizedOperation'
# Unknown parameter error.
UNKNOWNPARAMETER = 'UnknownParameter'
# Unsupported operation.
UNSUPPORTEDOPERATION = 'UnsupportedOperation'
# Deleting a non-empty category is not supported.
UNSUPPORTEDOPERATION_CLASSNOTEMPTY = 'UnsupportedOperation.ClassNotEmpty'
| [
"[email protected]"
] | |
42e5956217bb73d7bf84ce47a3cd84c808b6c11f | 2130aa6efd199c612b03b0cd949375dd828dd218 | /acoustid/data/submission.py | b3897ac10f2f83bd8c45d4bea70e680730d28066 | [
"MIT"
] | permissive | bazo/acoustid-server | 4774965b8a16555100c972c09582bb09ea10df3f | 56b11f1bbd093e23970d9baae2a2655ecea34aee | refs/heads/master | 2020-05-27T21:08:29.078822 | 2017-01-02T20:19:42 | 2017-01-02T20:19:42 | 83,599,159 | 1 | 0 | null | 2017-03-01T20:36:20 | 2017-03-01T20:36:20 | null | UTF-8 | Python | false | false | 6,283 | py | # Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import logging
from sqlalchemy import sql
from acoustid import tables as schema, const
from acoustid.data.fingerprint import lookup_fingerprint, insert_fingerprint, inc_fingerprint_submission_count, FingerprintSearcher
from acoustid.data.musicbrainz import resolve_mbid_redirect
from acoustid.data.track import insert_track, insert_mbid, insert_puid, merge_tracks, insert_track_meta, can_add_fp_to_track, can_merge_tracks, insert_track_foreignid
logger = logging.getLogger(__name__)
def insert_submission(conn, data):
"""
Insert a new submission into the database
"""
with conn.begin():
insert_stmt = schema.submission.insert().values({
'fingerprint': data['fingerprint'],
'length': data['length'],
'bitrate': data.get('bitrate'),
'mbid': data.get('mbid'),
'puid': data.get('puid'),
'source_id': data.get('source_id'),
'format_id': data.get('format_id'),
'meta_id': data.get('meta_id'),
'foreignid_id': data.get('foreignid_id'),
})
id = conn.execute(insert_stmt).inserted_primary_key[0]
logger.debug("Inserted submission %r with data %r", id, data)
return id
def import_submission(conn, submission, index=None):
"""
Import the given submission into the main fingerprint database
"""
with conn.begin():
update_stmt = schema.submission.update().where(
schema.submission.c.id == submission['id'])
conn.execute(update_stmt.values(handled=True))
mbids = []
if submission['mbid']:
mbids.append(resolve_mbid_redirect(conn, submission['mbid']))
logger.info("Importing submission %d with MBIDs %s",
submission['id'], ', '.join(mbids))
num_unique_items = len(set(submission['fingerprint']))
if num_unique_items < const.FINGERPRINT_MIN_UNIQUE_ITEMS:
logger.info("Skipping, has only %d unique items", num_unique_items)
return
    num_query_items = conn.execute("SELECT icount(acoustid_extract_query(%(fp)s))", dict(fp=submission['fingerprint'])).scalar()
if not num_query_items:
logger.info("Skipping, no data to index")
return
searcher = FingerprintSearcher(conn, index, fast=False)
searcher.min_score = const.TRACK_MERGE_THRESHOLD
matches = searcher.search(submission['fingerprint'], submission['length'])
fingerprint = {
'id': None,
'track_id': None,
'fingerprint': submission['fingerprint'],
'length': submission['length'],
'bitrate': submission['bitrate'],
'format_id': submission['format_id'],
}
    if matches:
        match = matches[0]
        all_track_ids = set()
        possible_track_ids = set()
        for m in matches:
            if m['track_id'] in all_track_ids:
                continue
            all_track_ids.add(m['track_id'])
            logger.debug("Fingerprint %d with track %d is %d%% similar", m['id'], m['track_id'], m['score'] * 100)
            # Attach the submission to the first track it can plausibly belong
            # to; a near-identical match lets us reuse the existing
            # fingerprint row instead of inserting a new one.
            if can_add_fp_to_track(conn, m['track_id'], submission['fingerprint'], submission['length']):
                possible_track_ids.add(m['track_id'])
                if not fingerprint['track_id']:
                    fingerprint['track_id'] = m['track_id']
                    if m['score'] > const.FINGERPRINT_MERGE_THRESHOLD:
                        fingerprint['id'] = m['id']
        # If the fingerprint could belong to several tracks, those tracks are
        # probably duplicates; merge them into the one with the lowest id.
        if len(possible_track_ids) > 1:
            for group in can_merge_tracks(conn, possible_track_ids):
                if fingerprint['track_id'] in group and len(group) > 1:
                    fingerprint['track_id'] = min(group)
                    group.remove(fingerprint['track_id'])
                    merge_tracks(conn, fingerprint['track_id'], list(group))
                    break
if not fingerprint['track_id']:
fingerprint['track_id'] = insert_track(conn)
if not fingerprint['id']:
fingerprint['id'] = insert_fingerprint(conn, fingerprint, submission['id'], submission['source_id'])
else:
inc_fingerprint_submission_count(conn, fingerprint['id'], submission['id'], submission['source_id'])
for mbid in mbids:
insert_mbid(conn, fingerprint['track_id'], mbid, submission['id'], submission['source_id'])
if submission['puid'] and submission['puid'] != '00000000-0000-0000-0000-000000000000':
insert_puid(conn, fingerprint['track_id'], submission['puid'], submission['id'], submission['source_id'])
if submission['meta_id']:
insert_track_meta(conn, fingerprint['track_id'], submission['meta_id'], submission['id'], submission['source_id'])
if submission['foreignid_id']:
insert_track_foreignid(conn, fingerprint['track_id'], submission['foreignid_id'], submission['id'], submission['source_id'])
return fingerprint
def import_queued_submissions(conn, index=None, limit=100, ids=None):
"""
    Import queued (not yet handled) submissions into the main fingerprint database
"""
query = schema.submission.select(schema.submission.c.handled == False).\
order_by(schema.submission.c.mbid.nullslast(), schema.submission.c.id.desc())
if ids is not None:
query = query.where(schema.submission.c.id.in_(ids))
if limit is not None:
query = query.limit(limit)
count = 0
for submission in conn.execute(query):
import_submission(conn, submission, index=index)
count += 1
logger.debug("Imported %d submissions", count)
return count
def lookup_submission_status(db, ids):
if not ids:
return {}
source = schema.fingerprint_source.\
join(schema.fingerprint).\
join(schema.track)
query = sql.select([schema.fingerprint_source.c.submission_id, schema.track.c.gid], from_obj=source).\
where(schema.fingerprint_source.c.submission_id.in_(ids))
results = {}
for id, track_gid in db.execute(query):
results[id] = track_gid
return results
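# Illustrative usage sketch (added; not part of the original module --
# `engine` below is a made-up SQLAlchemy engine):
#   with engine.connect() as conn:
#       import_queued_submissions(conn, limit=50)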
| [
"[email protected]"
] | |
ef26e4572a36ca6d9678ccc256904ec09d6b3688 | 2cff704d26840ca5d4f543c30acf0beb6edadda5 | /rig/exp_functions.py | c09f6b3b7190adb8e432a2e53d1fa1f93707da6c | [
"MIT"
] | permissive | tgbugs/mlab | ff7b8ae366bb6abf5b71e39f020cc80b4079e774 | dacc1663cbe714bb45c31b1b133fddb7ebcf5c79 | refs/heads/master | 2020-04-09T10:33:24.335267 | 2016-05-03T23:18:33 | 2016-05-03T23:18:33 | 12,688,098 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,126 | py | import re
import datetime
import inspect as ins
from sys import stdout
from time import sleep
from debug import TDB,ploc
try:
import rpdb2
except:
pass
tdb=TDB()
printD=tdb.printD
printFD=tdb.printFuncDict
tdbOff=tdb.tdbOff
#file to consolidate all the different functions I want to execute using the xxx.Control classes
#TODO this file needs a complete rework so that it can pass data to the database AND so that it can be used by keyboard AND so that it can be used by experiment scripts... means I may need to split stuff up? ;_;
#TODO rig control vs experiment control... these are technically two different 'modes' one is keyboard controlled the other is keyboard initiated...
#TODO ideally I want to do experiments the same way every time instead of allowing one part here and another there which is sloppy so those are highly ordered...
#TODO BUT I need a way to fix things, for example if the slice moves and I need to recalibrate the slice position (FUCK, how is THAT going to work out in metadata)
#TODO all of these are configured for terminal output only ATM, ideally they should be configged by whether they are called from keyboard or from experiment... that seems... reasonable??! not very orthogonal...
#mostly because when I'm running an experiment I don't want to accidentally hit something or cause an error
#TODO split into send and receive?!?
#TODO datasource/expected datasource mismatch
class kCtrlObj:
"""key controller object"""
def __init__(self, modestate, controller=lambda:None):
self.charBuffer=modestate.charBuffer
self.keyHandler=modestate.keyHandler
#I probably do not need to pass key handler to thing outside of inputManager...
#yep, not used anywhere, but I supose it could be used for submodes... we'll leave it in
self.setMode=modestate.setMode
self.updateModeDict=modestate.updateModeDict
self.__mode__=self.__class__.__name__
self.keyThread=modestate.keyThread
self.ikCtrlDict=modestate.ikCtrlDict
self.controller=controller
self.initController(self.controller)
    def reloadControl(self): #this won't work because it won't write or something....
printD('reiniting controller')
rpdb2.setbreak()
try:
self.ctrl.cleanup()
del(self.ctrl)
from mcc import mccControl
self.ctrl=Control()
self.ikCtrlDict[self.__mode__]=self
self.updateModeDict()
except:
printD('FAILURE')
raise IOError
return self
def initController(self,controller):
try:
self.ctrl=controller()
print('[OK]',controller.__name__,'started')
except:
print('[!] **LOAD ERROR**',controller.__name__,'not started, will listen for start')
self.ctrl=lambda:None
from threading import Thread
#self.pollThrd=Thread(target=self.pollForCtrl,args=(controller,))
#self.pollThrd.start()
self.ikCtrlDict[self.__mode__]=self
self.updateModeDict()
def pollForCtrl(self,controller): #FIXME maybe we SHOULD do this here since these are more tightly integrated with modestate
while self.keyThread.is_alive():
try:
self.ctrl=controller()
printD(self)
print('[OK]',controller.__name__,'started')
#printD(self.__mode__)
#self.ikCtrlDict[self.__mode__]=self
self.updateModeDict()
break
except:
sleep(2)
def wrapDoneCB(self):
class wrap:
def __init__(self,call,pre=lambda:None,post=lambda:None):
self.start=pre
self.do=call
self.stop=post
def go(self,*args):
#printD('wat')
self.start()
out=self.do(*args)
self.stop()
return out
excluded=['cleanup','__init__','doneCB','readProgDict','updateModeDict','setMode']
mems=ins.getmembers(self)
funcs=[func for func in mems if ins.ismethod(func[1]) and func[0] not in excluded]
#printFD(funcs)
for tup in funcs:
setattr(self,tup[0],wrap(tup[1],self.doneCB).go)
def cleanup(self):
pass
class clxFuncs(kCtrlObj):
def __init__(self, modestate):
from clx import clxControl
super().__init__(modestate,clxControl)
#self.initController(clxmsg)
#printD('clx ctrl',self.ctrl)
#self.clxCleanup=self.cleanup
self.programDict={}
#self.wrapDoneCB()
#class only
def readProgDict(self,progDict):
self.programDict=progDict
return self
def cleanup(self):
super().cleanup()
try:
self.ctrl.DestroyObject()
print(self.ctrl.__class__,'handler destroyed')
except:
pass
#print('this this works the way it is supposed to the we should never have to destory the object')
#input with output
def getStatus(self,outputs): #TODO outputs... should be able to output to as many things as I want... probably should be a callback to simplify things elsewhere? no?!?!
status=self.ctrl.GetStatus()
print(status)
return self
def load(self,key=None):
if not key:
print('Please enter the program to load')
self.keyHandler(1)
key=self.charBuffer.get()
try:
path=self.programDict[key]
#printD(path)
self.ctrl.LoadProtocol(path.encode('ascii'))
except:
print('Program not found')
raise
return self
#input only
def startMembTest(self):
self.ctrl.StartMembTest(120)
self.ctrl.StartMembTest(121)
return self
class datFuncs(kCtrlObj):
#interface with the database TODO this should be able to run independently?
"""Put ANYTHING permanent that might be data in here"""
def __init__(self, modestate):
#from database.models import * #DAMNIT FIXME
super().__init__(modestate)
self.markDict={}
self.posDict={}
self.MCCstateDict={}
#self.wrapDoneCB()
self.updateModeDict()
#FIXME
#this class should be the one to get data out of dataman
#dataman should have a method 'saveData' that takes the source class (self) and the data and stores it
def newExperiment(self):
return self
def newCell(self):
return self
def newSlice(self):
return self
def addMetaData(self):
return self
def addDataFile(self): #FIXME not sure this should go here...
return self
def getUserInputData(self):
"""Sadly there is still some data that I can't automatically collect"""
#get cell depths FROM SAME STARTING POINT??? measure this before expanding tissue with internal???
return self
class mccFuncs(kCtrlObj): #FIXME add a way to get the current V and I via... telegraph?
def __init__(self, modestate):
from mcc import mccControl
super().__init__(modestate,mccControl) #FIXME this needs better error messages
#self.initController(mccmsg)
self.MCCstateDict={}
#self.wrapDoneCB()
self.updateModeDict()
#associated metadata sources
self.state1DataSource=None
def inpWait(self):
#wait for keypress to move to the next program, this may need to spawn its own thread?
print('HIT ANYTHING TO ADVANCE! (not the dog, that could end poorly)')
self.keyHandler(1)
self.charBuffer.get()
return self
def getState(self): #FIXME this function and others like it should probably be called directly by dataman?
printD('hMCCmsg outer',self.ctrl.hMCCmsg)
def base():
state.append(self.ctrl.GetHoldingEnable())
state.append(self.ctrl.GetHolding())
state.append(self.ctrl.GetPrimarySignal())
state.append(self.ctrl.GetPrimarySignalGain())
state.append(self.ctrl.GetPrimarySignalLPF())
state.append(self.ctrl.GetPipetteOffset())
def vc():
base()
state.append(self.ctrl.GetFastCompCap())
state.append(self.ctrl.GetSlowCompCap())
state.append(self.ctrl.GetFastCompTau())
state.append(self.ctrl.GetSlowCompTau())
state.append(self.ctrl.GetSlowCompTauX20Enable())
def ic():
base()
state.append(self.ctrl.GetBridgeBalEnable())
state.append(self.ctrl.GetBridgeBalResist())
def iez():
base()
modeDict={0:vc,1:ic,2:iez}
stateList=[]
for i in range(self.ctrl.mcNum):
self.ctrl.selectMC(i)
state=[] #FIXME: make this a dict with keys as the name of the value? eh would probs complicate
            state.append(i) #might be superfluous but it could simplify the code to read out stateList
mode=self.ctrl.GetMode()
state.append(mode)
modeDict[mode]()
stateList.append(state)
print(state)
self.MCCstateDict[datetime.datetime.utcnow()]=stateList
return self
def printMCCstate(self):
print(re.sub('\), ',')\r\n',str(self.MCCstateDict)))
return self
def setMCState(self,MC=None,Mode=None,Holding=None,HoldingEnable=None): #TODO
#FIXME all of the experiment logic needs to be stored in one place instead of hidden in 10 files
#selectMC,SetMode,SetHolding,SetHoldingEnable,
#self.ctrl.selectMC()
return self
def allIeZ(self):
self.ctrl.selectMC(0)
self.ctrl.SetMode(2)
self.ctrl.selectMC(1)
self.ctrl.SetMode(2)
return self
def allVCnoHold(self):
#try:
self.ctrl.selectMC(0)
self.ctrl.SetMode(0)
self.ctrl.SetHoldingEnable(0)
self.ctrl.selectMC(1)
self.ctrl.SetMode(0)
self.ctrl.SetHoldingEnable(0)
#except:
#raise BaseException
return self
def allVChold_60(self):
self.ctrl.selectMC(0)
self.ctrl.SetMode(0)
self.ctrl.SetHolding(-.06)
self.ctrl.SetHoldingEnable(1)
self.ctrl.selectMC(1)
self.ctrl.SetMode(0)
self.ctrl.SetHolding(-.06)
self.ctrl.SetHoldingEnable(1)
return self
def allICnoHold(self):
self.ctrl.selectMC(0)
self.ctrl.SetMode(1)
self.ctrl.SetHoldingEnable(0)
self.ctrl.selectMC(1)
self.ctrl.SetMode(1)
self.ctrl.SetHoldingEnable(0)
return self
def testZtO_75(self):
self.ctrl.selectMC(0)
self.ctrl.SetMode(1)
self.ctrl.SetHoldingEnable(0)
self.ctrl.selectMC(1)
self.ctrl.SetMode(0)
self.ctrl.SetHolding(-.075)
self.ctrl.SetHoldingEnable(1)
return self
def testOtZ_75(self):
self.ctrl.selectMC(0)
self.ctrl.SetMode(0)
self.ctrl.SetHolding(-.075)
self.ctrl.SetHoldingEnable(1)
self.ctrl.selectMC(1)
self.ctrl.SetMode(1)
self.ctrl.SetHoldingEnable(0)
return self
def zeroVChold_60(self):
self.ctrl.selectMC(0)
self.ctrl.SetMode(0)
self.ctrl.SetHolding(-.06)
self.ctrl.SetHoldingEnable(1)
return self
def oneVChold_60(self):
self.ctrl.selectMC(1)
self.ctrl.SetMode(0)
#self.ctrl.poops(1) #awe, this is broken now due to something
self.ctrl.SetHolding(-.06)
self.ctrl.SetHoldingEnable(1)
return self
def cleanup(self):
super().cleanup()
try:
self.ctrl.DestroyObject()
print(self.ctrl.__class__,'handler destroyed')
except:
pass
class espFuncs(kCtrlObj):
def __init__(self, modestate):
from esp import espControl
super().__init__(modestate,espControl)
self.markDict={} #FIXME
self.posDict={} #FIXME
#self.initController(npControl)
self.updateModeDict()
self.modestate=modestate
self.setMoveDict()
#self.event=modestate.event
#associated metadatasources:
self.EspXDataSource=None
self.EspYDataSource=None
def getPos(self):
#may want to demand a depth input (which can be bank)
#try:
pos=self.ctrl.getPos()
#self.doneCB()
self.posDict[datetime.datetime.utcnow()]=pos #FIXME dat should handle ALL of this internally
print(pos)
#except:
#printD('oops')
#raise
return self
def setPos(self,x,y):
self.ctrl.setPos((x,y)) #FIXME may need BsetPos
def cleanup(self):
super().cleanup()
self.ctrl.cleanup()
return self
def main():
    esp=espFuncs(None) #espFuncs now takes only a modestate argument
    #mcc=mccFuncs(None)
if __name__=='__main__':
main()
| [
"[email protected]"
] | |
4ad97214cab242cab7be5cd4232d8eca3d8ff676 | 89d920e8de469466f45172948082284b24ee8ca6 | /sdt/bin/sdpostpipelineutils.py | 4ea778b4bbc53e16782ee12c4bdf0fc87ea83537 | [] | no_license | cedadev/synda | fb22cce909e8b4fb8e51e7ab506c337d6ec5d9d2 | 9b9fa5b9b13719e1307f093d208256e359e501af | refs/heads/master | 2021-09-24T03:56:21.545769 | 2020-09-16T10:34:44 | 2020-09-16T10:34:44 | 187,797,897 | 1 | 0 | null | 2020-01-28T12:56:15 | 2019-05-21T08:45:47 | Python | UTF-8 | Python | false | false | 1,918 | py | #!/usr/bin/env python
# -*- coding: ISO-8859-1 -*-
##################################
# @program synda
# @description climate models data transfer program
# @copyright Copyright “(c)2009 Centre National de la Recherche Scientifique CNRS.
# All Rights Reserved”
# @license CeCILL (https://raw.githubusercontent.com/Prodiguer/synda/master/sdt/doc/LICENSE)
##################################
"""This module contains post pipeline generic functions. """
import sdapp
import sdconst
from sdexception import SDException
def exists_attached_parameter(file_,name):
if 'attached_parameters' in file_:
if name in file_['attached_parameters']:
return True
else:
return False
else:
return False
def get_attached_parameter(file_,name,default=None):
if 'attached_parameters' in file_:
return file_['attached_parameters'].get(name,default)
else:
return default
def get_attached_parameter__global(files,name):
"""This function assumes all files have the same value for the <name> attribute."""
if len(files)>0:
        file_=files[0] # use the first file's value (arbitrary choice; all files share it)
return get_attached_parameter(file_,name)
else:
return None
# The two functions below provide some abstraction over the file type.
def get_functional_identifier_value(f):
name=get_functional_identifier_name(f)
if name in f:
functional_id=f[name]
else:
raise SDException('SYDUTILS-020','Incorrect identifier (%s)'%name)
return functional_id
def get_functional_identifier_name(f):
if f["type"]==sdconst.SA_TYPE_FILE:
functional_id='file_functional_id'
elif f["type"]==sdconst.SA_TYPE_DATASET:
functional_id='dataset_functional_id'
else:
raise SDException('SYDUTILS-028','Incorrect type (%s)'%f["type"])
return functional_id
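
# Illustrative usage (hypothetical file dicts, illustration only — not part of
# the module API):
#
#   f = {'type': sdconst.SA_TYPE_FILE, 'file_functional_id': 'cmip5.foo.bar.nc'}
#   get_functional_identifier_name(f)   # -> 'file_functional_id'
#   get_functional_identifier_value(f)  # -> 'cmip5.foo.bar.nc'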
# ---- /Product_details/views.py (sameesayeed007/ecommercesite) ----
from django.shortcuts import render
from django.http import HttpResponse
from django.http.response import JsonResponse
from rest_framework.parsers import JSONParser
from rest_framework import status
import datetime
from difflib import SequenceMatcher
import json
from Intense.models import (
Product, Order, Terminal,TerminalUsers,SpecificationPrice,subtraction_track,OrderDetails,CompanyInfo, ProductPrice, Userz,User,product_delivery_area,DeliveryLocation,DeliveryArea,
BillingAddress, ProductPoint, ProductSpecification,ProductImage,SpecificationImage,
user_relation, Cupons, Comment, CommentReply, Reviews,
discount_product, Warehouse, Shop, WarehouseInfo, ShopInfo, WarehouseInfo,
inventory_report, ProductBrand, ProductCode,DeliveryInfo,Invoice,Inventory_Price,inventory_report)
from Product_details.serializers import (DeliveryInfoSerializer,MotherSpecificationSerializer,MotherDeliveryInfoCreationSerializer,MaxMinSerializer,MaxMinSerializer1,MotherCodeCreationSerializer,MotherSpecificationCreationSerializer,MotherProductImageCreationSerializer, ChildProductCreationSerializer,MaxMinSerializer,ProductDeliveryAreaSerializer, TerminalSerializer,ProductPriceSerializer, ProductPointSerializer, ProductSpecificationSerializer,
ProductSpecificationSerializerz,SSerializer,WSerializer,ProductDetailSerializer, ProductDetailSerializer1, ProductDetailSerializer2, CupponSerializer, ProductDiscountSerializer,
WarehouseSerializer,ChildSpecificationPriceSerializer,SellerSpecificationSerializer,OwnSpecificationSerializer, ShopSerializer,InventoryReportSerializer, WarehouseInfoSerializer, ShopInfoSerializer, NewWarehouseInfoSerializer, AddBrandSerializer, ProductSpecificationSerializer1)
from Product.serializers import ProductCodeSerializer
from User_details.serializers import UserSerializerz
from Cart.serializers import OrderDetailsSerializer, OrderSerializer,InvoiceSerializer
from rest_framework.decorators import api_view
from django.views.decorators.csrf import csrf_exempt
from Intense.Integral_apis import (
create_product_code,category1_data_upload
)
from datetime import datetime
from django.contrib.auth.hashers import make_password
from datetime import timedelta
from django.utils import timezone
import requests
from django.urls import reverse, reverse_lazy
from django.http import HttpResponseRedirect
from django.conf import settings
from colour import Color
from rest_framework.response import Response
from django.contrib.sites.models import Site
from datetime import date
from Intense.Integral_apis import create_user_balance,create_user_profile
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from io import BytesIO
from PIL import Image, ImageFile
site_path = "http://127.0.0.1:7000/"
own_site_path = "http://127.0.0.1:8000/"
#site_path = "https://eshoppingmall.com.bd/"
#site_path = "http://188.166.240.77:8080/"
current = date.today()
@api_view(['POST', ])
def get_colors(request, product_id):
variant = request.data.get('variant')
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
print("product ase")
try:
product_spec = ProductSpecification.objects.filter(
product_id=product_id, weight_unit=variant,specification_status="Published")
except:
product_spec = None
if product_spec:
print("speciifcation ase")
product_colors = list(product_spec.values_list(
'color', flat=True).distinct())
else:
product_colors = []
else:
product_colors = []
return JsonResponse({'success': True, 'colors': product_colors})
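
# Client-side sketch of the variant -> color -> size cascade (URL paths are
# assumptions; the real routes live in urls.py, which is not shown here):
#
#   requests.post(base + "productdetails/get_colors/12/", data={"variant": "kg"})
#   requests.post(base + "productdetails/get_sizes/12/",
#                 data={"variant": "kg", "color": "Red"})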
@api_view(['POST', ])
def get_sizes(request, product_id):
variant = request.data.get('variant')
color = request.data.get('color')
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
print("product ase")
try:
product_spec = ProductSpecification.objects.filter(
product_id=product_id, weight_unit=variant, color=color,specification_status="Published")
except:
product_spec = None
if product_spec:
print("speciifcation ase")
product_colors = list(product_spec.values_list(
'size', flat=True).distinct())
else:
product_colors = []
else:
product_colors = []
return JsonResponse({'success': True, 'sizes': product_colors})
@api_view(['POST', ])
def get_spec_info(request, product_id):
variant = request.data.get('variant')
color = request.data.get('color')
size = request.data.get('size')
print(variant)
print(color)
print(size)
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
print("product ase")
try:
product_spec = ProductSpecification.objects.filter(
product_id=product_id, weight_unit=variant, color=color, size=size,specification_status="Published").first()
except:
product_spec = None
print(product_spec)
if product_spec:
specification_id = product_spec.id
print("speciifcation ase")
print(product_spec.is_own)
if product_spec.is_own == True:
print("amar nijer product")
spec_serializer = ProductSpecificationSerializer1(
product_spec, many=False)
prod_data = spec_serializer.data
else:
spec_serializer = ProductSpecificationSerializer1(
product_spec, many=False)
prod_data = spec_serializer.data
print("fbdwsufbdufbdufbgdu")
print(prod_data)
url = own_site_path + "productdetails/not_own_quantity_check/" +str(specification_id)+ "/"
own_response = requests.get(url = url)
own_response = own_response.json()
print(own_response)
if own_response["success"] == True:
#update the quantity
prod_data["quantity"] = own_response["quantity"]
url2 = own_site_path + "productdetails/check_price/" +str(specification_id)+ "/"
own_response2 = requests.get(url = url2)
own_response2 = own_response2.json()
print(own_response2)
if own_response2["success"] == False:
product_spec.on_hold = True
product_spec.save()
prod_data["on_hold"] = True
else:
prod_data = {}
else:
prod_data = {}
return JsonResponse({'success': True, 'specification': prod_data})
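
# Note on get_spec_info: for specifications that are not our own, the local
# quantity is refreshed from the mother site (own_site_path) and a failing
# price check puts the specification on hold. Sketch of the mother-site JSON
# this code expects (field names inferred from the usage above):
#
#   {"success": true, "quantity": 7}   # not_own_quantity_check
#   {"success": false}                 # check_price -> on_hold = True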
@api_view(['POST', ])
def color_size(request, product_id):
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
product_spec = ProductSpecification.objects.filter(
product_id=product_id) & ProductSpecification.objects.filter(quantity__gte=1)
product_colors = list(product_spec.values_list(
'color', flat=True).distinct())
return JsonResponse({'success': True, 'message': 'The colors are shown', 'colors': product_colors})
else:
product_colors = []
return JsonResponse({'success': False, 'message': 'The colors are not shown', 'colors': product_colors})
@api_view(['POST', ])
def available_sizes(request, product_id):
color = request.data.get("color")
try:
product = Product.objects.get(id=product_id)
except:
product = None
if product:
product_spec = ProductSpecification.objects.filter(product_id=product_id) & ProductSpecification.objects.filter(
color=color) & ProductSpecification.objects.filter(quantity__gte=1)
product_sizes = list(product_spec.values_list(
'size', flat=True).distinct())
product_quantities = list(
product_spec.values_list('quantity', flat=True))
dic = {}
for i in range(len(product_sizes)):
item = {product_sizes[i]: product_quantities[i]}
dic.update(item)
return JsonResponse({'success': True, 'message': 'The colors are shown', 'sizes': product_sizes, 'quantities': dic})
else:
product_sizes = []
return JsonResponse({'success': False, 'message': 'The colors are not shown', 'sizes': product_sizes})
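
# available_sizes pairs each size with its stock count; the loop above is
# equivalent to this one-liner (shown for clarity only):
#
#   dic = dict(zip(product_sizes, product_quantities))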
@api_view(['POST', ])
def add_points(request):
if request.method == 'POST':
pointserializer = ProductPointSerializer(data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
# This updates the product points
@api_view(['POST', ])
def update_points(request, product_id):
try:
product = ProductPoint.objects.filter(product_id=product_id).last()
if request.method == 'POST':
pointserializer = ProductPointSerializer(
product, data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This updates the product points
@api_view(['POST', ])
def delete_points(request, product_id):
try:
product = ProductPoint.objects.filter(product_id=product_id)
if request.method == 'POST':
product.delete()
return JsonResponse({'message': 'The product points have been deleted'})
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This adds the current product price
@api_view(['POST', ])
def add_price(request):
if request.method == 'POST':
pointserializer = ProductPriceSerializer(data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
# This updates the current product price
@api_view(['POST', ])
def update_price(request, product_id):
try:
product = ProductPrice.objects.filter(product_id=product_id).last()
if request.method == 'POST':
pointserializer = ProductPriceSerializer(
product, data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return JsonResponse(pointserializer.errors)
except ProductPrice.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This updates the product points
@api_view(['POST', ])
def delete_price(request, product_id):
try:
product = ProductPrice.objects.filter(product_id=product_id)
if request.method == 'POST':
product.delete()
return JsonResponse({'message': 'The product prices have been deleted'})
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This adds product points
@api_view(['POST', ])
def add_specification(request):
if request.method == 'POST':
pointserializer = ProductSpecificationSerializer(data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse({'success': True, 'message': 'Data is shown below', 'data': pointserializer.data}, status=status.HTTP_201_CREATED)
else:
return JsonResponse({'success': False, 'message': 'Data could not be inserted', 'data': {}})
# This updates the latest product specification
@api_view(['POST', ])
def update_specification(request, product_id):
try:
product = ProductSpecification.objects.filter(
product_id=product_id).last()
if request.method == 'POST':
pointserializer = ProductSpecificationSerializer(
product, data=request.data)
if pointserializer.is_valid():
pointserializer.save()
return JsonResponse(pointserializer.data, status=status.HTTP_201_CREATED)
return Response(pointserializer.errors)
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
# This deletes the product specification
@api_view(['POST', ])
def delete_specification(request, product_id):
try:
product = ProductSpecification.objects.filter(product_id=product_id)
if request.method == 'POST':
product.delete()
return JsonResponse({'message': 'The product specification have been deleted'})
except ProductPoint.DoesNotExist:
return JsonResponse({'message': 'This product does not exist'}, status=status.HTTP_404_NOT_FOUND)
@api_view(['GET', ])
def show_specification(request, product_id):
try:
title = Product.objects.get(id=product_id)
except:
title = None
if title:
product_title = title.title
else:
product_title = ''
try:
product = ProductSpecification.objects.filter(product_id=product_id,admin_status="Confirmed")
except:
product = None
if product:
productserializer = ProductSpecificationSerializer1(product, many=True)
data = productserializer.data
else:
data = {}
return JsonResponse({
'success': True,
'message': 'Data is shown below',
'product_title': product_title,
'data': data
})
@api_view(['GET', ])
def show_seller_specification(request, product_id):
try:
title = Product.objects.get(id=product_id)
except:
title = None
if title:
product_title = title.title
else:
product_title = ''
try:
product = ProductSpecification.objects.filter(product_id=product_id)
except:
product = None
if product:
productserializer = ProductSpecificationSerializer1(product, many=True)
data = productserializer.data
else:
data = {}
return JsonResponse({
'success': True,
'message': 'Data is shown below',
'product_title': product_title,
'data': data
})
# @api_view(['POST',])
# def add_spec(request,product_id):
# specification_data_value ={
# 'product_id': product_id,
# 'color': request.data.get("color"),
# 'size': request.data.get("size"),
# 'weight': request.data.get("weight"),
# 'warranty':request.data.get("warranty"),
# 'warranty_unit':request.data.get("warranty_unit"),
# 'unit':request.data.get("product_unit"),
# }
# product_price ={
# 'product_id': product_id,
# 'price' : request.data.get("price"),
# 'purchase_price': request.data.get("purchase_price"),
# #'currency_id': request.data.get('currency_id')
# }
# product_discount ={
# 'product_id': product_id,
# 'amount': request.data.get("discount_amount"),
# 'discount_type': request.data.get("discount_type"),
# #'start_date' : '2020-09-05',
# #'end_date' : data['discount_end_date']
# 'start_date': request.data.get("discount_start_date"),
# 'end_date': request.data.get("discount_end_date")
# }
# product_point ={
# 'product_id': product_id,
# 'point': request.data.get("point_amount"),
# # 'end_date': data['point_end_date']
# 'start_date': request.data.get("point_start_date"),
# 'end_date': request.data.get("point_end_date")
# }
# delivery_info = {
# 'height': request.data.get("delivery_height"),
# 'width': request.data.get("delivery_width"),
# 'length': request.data.get("delivery_length"),
# 'weight': request.data.get("delivery_weight"),
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': request.data.get("delivery_inside_city_charge"),
# 'charge_outside': request.data.get("delivery_outside_city_charge"),
# }
# print("delivery Info", delivery_info)
# if request.method == 'POST':
# flag = 0
# spec={}
# price={}
# discount= {}
# point={}
# delivery={}
# try:
# product_spec= ProductSpecificationSerializer(data=specification_data_value)
# if product_spec.is_valid():
# product_spec.save()
# spec.update(product_spec.data)
# else:
# flag= flag+1
# product_price.update({'specification_id':spec['id']})
# product_price=ProductPriceSerializer (data = product_price)
# if product_price.is_valid():
# product_price.save()
# price.update(product_price.data)
# else:
# flag= flag+1
# if product_discount['discount_type'] is None:
# discount={}
# else:
# product_discount.update({'specification_id':spec['id']})
# product_dis = ProductDiscountSerializer (data = product_discount)
# if product_dis.is_valid():
# product_dis.save()
# discount.update(product_dis.data)
# else:
# flag= flag+1
# product_point.update({'specification_id':spec['id']})
# product_point_value= ProductPointSerializer (data=product_point)
# if product_point_value.is_valid():
# product_point_value.save()
# point.update(product_point_value.data)
# else:
# flag= flag+1
# delivery_info.update({'specification_id':spec['id']})
# delivery_value= DeliveryInfoSerializer (data=delivery_info)
# if delivery_value.is_valid():
# delivery_value.save()
# delivery.update(delivery_value.data)
# else:
# flag= flag+1
# if flag>0:
# return JsonResponse ({
# "success": False,
# "message": "Something went wrong !!",
# })
# else:
# return JsonResponse ({
# "success": True,
# "message": "Specification data has been inserted Successfully",
# "specification": spec,
# "price":price,
# "discount": discount,
# "point": point,
# "delivery": delivery
# })
# except:
# return JsonResponse ({
# "success": False,
# "message": "Something went wrong !!"
# })
@api_view(['POST', 'GET'])
def edit_spec(request, specification_id):
current_date = date.today()
print("current_date")
print(current_date)
current_date = str(current_date)
print(request.data)
print(specification_id)
try:
product_spec = ProductSpecification.objects.get(id=specification_id)
except:
product_spec = None
if product_spec:
product_id = product_spec.product_id
else:
product_id = 0
print(product_id)
if request.method == 'POST':
vat = request.data.get("vat")
if vat == "":
vat = 0.00
specification_data_value = {
'product_id': product_id,
'color': request.data.get("color"),
'size': request.data.get("size"),
'weight': request.data.get("weight"),
'warranty': request.data.get("warranty"),
'warranty_unit': request.data.get("warranty_unit"),
'unit': request.data.get("product_unit"),
'vat': vat,
'is_own' :True
}
# price = request.data.get("price")
# if price == "":
# price = 0.00
# purchase_price = request.data.get("purchase_price")
# if purchase_price == "":
# purchase_price = 0.00
# product_price = {
# 'product_id': product_id,
# 'price': price,
# 'specification_id': specification_id,
# 'purchase_price': purchase_price
# # 'currency_id': request.data.get('currency_id')
# }
discount_type = request.data.get("discount_type")
if discount_type == "none":
product_discount = {
'product_id': product_id,
'specification_id': specification_id,
'amount': 0.00,
'discount_type': discount_type,
# 'start_date' : '2020-09-05',
# 'end_date' : data['discount_end_date']
'start_date': current_date,
'end_date': current_date
}
else:
            discount_amount = request.data.get("discount_amount")
            if discount_amount in ("", None):
                discount_amount = 0.00
            discount_end_date = request.data.get("discount_end_date")
            if discount_end_date in ("", None):
                discount_end_date = current_date
            discount_start_date = request.data.get("discount_start_date")
            if discount_start_date in ("", None):
                discount_start_date = current_date
            product_discount = {
                'product_id': product_id,
                'amount': discount_amount,
                'discount_type': discount_type,
                'start_date': discount_start_date,
                'specification_id': specification_id,
                'end_date': discount_end_date
            }
        point_amount = request.data.get("point_amount")
        if point_amount in ("", None):
            point_amount = 0.00
        point_end_date = request.data.get("point_end_date")
        if point_end_date in ("", None):
            point_end_date = current_date
        point_start_date = request.data.get("point_start_date")
        if point_start_date in ("", None):
            point_start_date = current_date
        product_point = {
            'product_id': product_id,
            'point': point_amount,
            'start_date': point_start_date,
            'specification_id': specification_id,
            'end_date': point_end_date
        }
delivery_height = request.data.get("delivery_height")
if delivery_height == "":
delivery_height = 0.0
delivery_width = request.data.get("delivery_width")
if delivery_width == "":
delivery_width = 0.0
delivery_length = request.data.get("delivery_length")
if delivery_length == "":
delivery_length = 0.0
delivery_weight = request.data.get("delivery_weight")
if delivery_weight == "":
delivery_weight = 0.0
# delivery_inside = request.data.get("delivery_inside_city_charge")
# if delivery_inside == "":
# delivery_inside = 0
# delivery_outside = request.data.get("delivery_outside_city_charge")
# if delivery_outside == "":
# delivery_outside = 0
# delivery_info = {
# 'height': delivery_height,
# 'width': delivery_width,
# 'length': delivery_length,
# 'weight': delivery_weight,
# 'measument_unit': request.data.get("delivery_product_unit"),
# 'charge_inside': delivery_inside,
# 'specification_id': specification_id,
# 'charge_outside': delivery_outside
# }
        # Use the sanitized values computed above instead of re-reading raw
        # request data (the raw reads discarded the ""-to-0.0 defaults).
        delivery_info = {
            'height': delivery_height,
            'width': delivery_width,
            'length': delivery_length,
            'weight': delivery_weight,
            'measument_unit': request.data.get("delivery_product_unit"),
            'delivery_free': request.data.get("delivery_free"),
        }
try:
try:
spec = ProductSpecification.objects.get(id=specification_id)
except:
spec = None
if spec:
specification_serializer = ProductSpecificationSerializer(
spec, data=specification_data_value)
if specification_serializer.is_valid():
print("spec save hochche")
specification_serializer.save()
values = specification_serializer.data
else:
return Response({'success': False, 'message': 'Product Specification could not be updated'})
# try:
# price = ProductPrice.objects.get(
# specification_id=specification_id)
# except:
# price = None
# if price:
# price_serializer = ProductPriceSerializer(
# price, data=product_price)
# if price_serializer.is_valid():
# price_serializer.save()
# print("price save hochche")
# price_data = price_serializer.data
# else:
# return Response({'success': False, 'message': 'Product Price could not be updated'})
try:
points = ProductPoint.objects.get(
specification_id=specification_id)
except:
points = None
if points:
point_serial = ProductPointSerializer(
points, data=product_point)
if point_serial.is_valid():
point_serial.save()
point_data = point_serial.data
else:
pass
else:
point_serial = ProductPointSerializer(data=product_point)
if point_serial.is_valid():
point_serial.save()
point_data = point_serial.data
else:
print("point2")
print(point_serial.errors)
try:
delivery = DeliveryInfo.objects.get(
specification_id=specification_id)
except:
delivery = None
if delivery:
delivery_serial = DeliveryInfoSerializer(
delivery, data=delivery_info)
if delivery_serial.is_valid():
delivery_serial.save()
delivery_data = delivery_serial.data
else:
delivery_serial = DeliveryInfoSerializer(
data=delivery_info)
if delivery_serial.is_valid():
delivery_serial.save()
delivery_data = delivery_serial.data
try:
discount = discount_product.objects.get(
specification_id=specification_id)
except:
discount = None
if discount:
discount_serializer = ProductDiscountSerializer(
discount, data=product_discount)
if discount_serializer.is_valid():
discount_serializer.save()
discount_data = discount_serializer.data
else:
discount_serializer = ProductDiscountSerializer(
data=product_discount)
if discount_serializer.is_valid():
discount_serializer.save()
discount_data = discount_serializer.data
data_val = {
'option' : request.data.get("delivery_option"),
'spec': specification_id,
# 'arrayForDelivery': [
# {
# 'selectedDistrict': 'Dhaka',
# 'selectedThana':[
# 'Banani',
# 'Gulshan',
# 'Rampura',
# 'Dhanmondi'
# ]
# },
# {
# 'selectedDistrict': 'Barishal',
# 'selectedThana':[
# 'Hizla',
# 'Muladi',
# 'Borguna',
# 'Betagi'
# ]
# }
# ]
'arrayForDelivery': request.data.get("arrayForDelivery")
}
print("values for specification")
print(data_val)
# print("before calling method")
value = add_delivery_data1(data_val)
print(value)
return Response({'success': True, 'message': 'Edit is successful'})
except:
return Response({'success': False, 'message': 'Something went wrong !!'})
@api_view(['POST', 'GET'])
def delete_spec(request, specification_id):
if(request.method == "POST"):
try:
price = ProductPrice.objects.filter(specification_id= specification_id)
price.delete()
points = ProductPoint.objects.filter(specification_id=specification_id)
points.delete()
discount = discount_product.objects.filter(specification_id=specification_id)
discount.delete()
code = ProductCode.objects.filter(specification_id = specification_id)
code.delete()
invenprice = Inventory_Price.objects.filter(specification_id=specification_id)
invenprice.delete()
warehouseinfo = WarehouseInfo.objects.filter(specification_id=specification_id)
warehouseinfo.delete()
shopinfo = ShopInfo.objects.filter(specification_id=specification_id)
shopinfo.delete()
invenrep = inventory_report.objects.filter(specification_id=specification_id)
invenrep.delete()
deliveryinfo = DeliveryInfo.objects.filter(specification_id=specification_id)
deliveryinfo.delete()
subtract = subtraction_track.objects.filter(specification_id=specification_id)
subtract.delete()
deliveryarea = product_delivery_area.objects.filter(specification_id=specification_id)
deliveryarea.delete()
specimage = SpecificationImage.objects.filter(specification_id=specification_id)
specimage.delete()
orderdetail = OrderDetails.objects.filter(specification_id=specification_id)
orderdetail.delete()
prospec = ProductSpecification.objects.filter(id=specification_id)
prospec.delete()
return Response({
'success': True,
'message': 'data has been deleted successfully !!'
})
except:
return Response({
'success':False,
'Message': 'Some internal problem occurs while deleting the value'
})
@api_view(['GET', ])
def show(request, product_id):
    url = request.build_absolute_uri(
        reverse('product_price_point_specification:showspec', args=[product_id]))
data = requests.get(url)
return HttpResponse(data)
# This changes the comments,replies,reviews and order tables
@api_view(['POST', ])
def transfer(request, user_id):
# Here userid provided is the newly verified userid
try:
existing_user = user_relation.objects.filter(
verified_user_id=user_id).last()
print(existing_user)
except:
existing_user = None
if existing_user is not None:
# Change the ids in the certain table
# print(type(existing_user.verified_user_id))
# print(existing_user.non_verified_user_id)
user_id = existing_user.verified_user_id
non_verified_user_id = existing_user.non_verified_user_id
# Update all the order tables
orders = Order.objects.filter(non_verified_user_id=non_verified_user_id).update(
user_id=user_id, non_verified_user_id=None)
# Update the Billing address
billing_address = BillingAddress.objects.filter(
non_verified_user_id=non_verified_user_id).update(user_id=user_id, non_verified_user_id=None)
# Update the comment,reply and review tables
comments = Comment.objects.filter(non_verified_user_id=non_verified_user_id).update(
user_id=user_id, non_verified_user_id=None)
reply = CommentReply.objects.filter(non_verified_user_id=non_verified_user_id).update(
user_id=user_id, non_verified_user_id=None)
reviews = Reviews.objects.filter(non_verified_user_id=non_verified_user_id).update(
user_id=user_id, non_verified_user_id=None)
        return JsonResponse({'message': 'The user relation exists; the records have been transferred'})
else:
return JsonResponse({'message': 'The user does not exist'})
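
# transfer() sketch: given a user_relation row with non_verified_user_id=42 and
# verified_user_id=7 (illustrative ids), every Order, BillingAddress, Comment,
# CommentReply and Reviews row created under guest id 42 is re-pointed at
# verified user 7 and its non_verified_user_id is cleared.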
@api_view(['GET', ])
def product_detail(request, product_id):
try:
product = Product.objects.filter(id=product_id).last()
except:
product = None
if product is not None:
product_serializer = ProductDetailSerializer2(product, many=False)
return JsonResponse({'success': True, 'message': 'The data is shown below', 'data': product_serializer.data}, safe=False)
else:
return JsonResponse({'success': False, 'message': 'This product does not exist', 'data':{}})
# --------------------------------- Product Cupon -------------------------------
@api_view(["GET", "POST"])
def insert_cupon(request):
    '''
    This is for inserting a cupon code into the database. The admin will set the cupon code and it will appear to users while buying a product.
    Calling http://127.0.0.1:8000/cupons/create_cupon/ will invoke this API. This API only has a POST response.
    Post Response:
        cupon_code : This is a character field; the cupon is named after this value.
        amount : This is the amount which will be deducted from the user's payable balance.
        start_from : This is a DateField. It is set automatically upon the creation of a cupon.
        valid_to : This is another DateField. The admin sets this date while creating the cupon.
        is_active : This is a BooleanField. It indicates whether the cupon is active or not. Using this flag, the cupon can be deactivated before its
        validation time ends.
    '''
if(request.method == "POST"):
serializers = CupponSerializer(data=request.data)
if(serializers.is_valid()):
serializers.save()
return Response(serializers.data, status=status.HTTP_201_CREATED)
return Response(serializers.errors)
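
# Example POST payload for insert_cupon (field names from the docstring above;
# values are illustrative). start_from is filled in automatically on creation:
#
#   {"cupon_code": "EID10", "amount": 10.0, "valid_to": "2021-03-01",
#    "is_active": true}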
@api_view(["GET", "POST"])
def get_all_cupons(request):
'''
    This is for getting all the cupons. Calling http://127.0.0.1:8000/cupons/all_cupon/ will invoke this API.
    The GET response will return the following structured data.
Get Response:
[
{
"id": 2,
"cupon_code": "30% Off",
"amount": 50.0,
"start_from": "2020-08-27",
"valid_to": "2020-09-30",
"is_active": false
},
{
"id": 3,
"cupon_code": "25 Taka Off",
"amount": 25.0,
"start_from": "2020-08-27",
"valid_to": "2020-10-27",
"is_active": false
}
]
'''
if(request.method == "GET"):
queryset = Cupons.objects.all()
serializers = CupponSerializer(queryset, many=True)
return Response(serializers.data)
@api_view(["GET", "POST"])
def update_specific_cupons(request, cupon_id):
'''
    This is for updating a particular cupon. Calling http://127.0.0.1:8000/cupons/update_cupon/4/ will invoke this API.
    The cupon id must be sent as a parameter when calling this API.
    Expected POST response after updating:
{
"id": 4,
"cupon_code": "25 Taka Off",
"amount": 25.0,
"start_from": "2020-08-27",
"valid_to": "2020-10-27",
"is_active": true
}
'''
try:
cupon = Cupons.objects.get(pk=cupon_id)
except:
        return Response({'Message': 'Check whether the requested data exists or not'})
if(request.method == "GET"):
cupon_serializer = CupponSerializer(cupon, many=False)
return Response(cupon_serializer.data)
elif(request.method == "POST"):
Cupon_serializers = CupponSerializer(cupon, data=request.data)
if(Cupon_serializers.is_valid()):
Cupon_serializers.save()
return Response(Cupon_serializers.data, status=status.HTTP_201_CREATED)
return Response(Cupon_serializers.errors)
@api_view(["GET", "POST"])
def delete_specific_cupons(request, cupon_id):
'''
    This is for deleting a particular cupon value. Calling 127.0.0.1:8000/cupons/delete_cupon/4/ will invoke this API.
    After performing the delete operation successfully, this API will provide the following response.
Successful Post Response:
[
"Cupon has been deleted successfully"
]
Unsuccessful Post Response:
{
"Message": "Some internal problem occurs while deleting the value"
}
'''
try:
cupon = Cupons.objects.get(pk=cupon_id)
except:
return Response({'Message': 'Some internal problem occurs while deleting the value'})
if(request.method == "POST"):
cupon.delete()
return Response({'Cupon has been deleted successfully'})
# --------------------------- Product Discount -----------------------
@api_view(["GET", "POST"])
def get_all_discount_value(request):
'''
    This API is for getting all the discount related information. Calling http://127.0.0.1:8000/discount/all_discount/ will invoke
    this API. This API only has a GET response.
GET Response:
discount_type (This will be a Chartype data. This will return the type of discount like Flat, Flash, Wholesale etc.)
amount (This will return the amount which will be apply where discount is applicable.)
start_date (This is the discount start date. From this date discount will be started.)
end_date (This is discount end date. On this date, discount will be end.)
max_amount (Sometimes, admin can restrict the highest level of amount for discount. This value represents that highest amount value.)
'''
if(request.method == "GET"):
queryset = discount_product.objects.all()
discount_serializers = ProductDiscountSerializer(queryset, many=True)
return Response(discount_serializers.data)
@api_view(["GET", "POST"])
def insert_specific_discount_value(request):
'''
    This API is just for inserting the particular discount value corresponding to a product. It only has a POST response. Calling
    http://127.0.0.1:8000/discount/insert_specific/ will invoke this API.
    POST Response:
    This API expects the following fields while performing a POST request.
Discount (It will be type of discount, simply a name.)
amount (This will be a float value. This amount value will be used to calculate the discount value)
start_date ( This is the date from when the discount will be started.)
end_date (On this date, the discount will end)
        max_amount (Admin can set the highest amount of discount, e.g. a 30% discount up to 50 taka. Here, the max amount is 50 taka.)
product_id or group_product_id ( product_id or group_product_id, on which the discount will be performed must need to provide.)
'''
if(request.method == "POST"):
discount_serializers = ProductDiscountSerializer(data=request.data)
if(discount_serializers.is_valid()):
discount_serializers.save()
return Response(discount_serializers.data, status=status.HTTP_201_CREATED)
return Response(discount_serializers.errors)
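
# Example POST payload for insert_specific_discount_value (illustrative values):
#
#   {"product_id": 3, "discount_type": "Flat", "amount": 30.0,
#    "start_date": "2020-09-01", "end_date": "2020-09-30", "max_amount": 50.0}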
@api_view(["GET", "POST"])
def get_update_specific_value(request, product_id):
'''
    This API is for getting a particular discount value, which is needed when updating a particular record. The admin may change the end date of a discount or
    may increase the amount value. Calling http://127.0.0.1:8000/discount/specific_value/3/ will invoke this API. This API has both
    POST and GET responses.
    params : product_id
Get Response:
discount_type (This will be a Chartype data. This will return the type of discount like Flat, Flash, Wholesale etc.)
amount (This will return the amount which will be apply where discount is applicable.)
start_date (This is the discount start date. From this date discount will be started.)
end_date (This is discount end date. On this date, discount will be end.)
max_amount (Sometimes, admin can restrict the highest level of amount for discount. This value represents that highest amount value.)
POST Response:
    This API expects the following fields while performing a POST request.
Discount (It will be type of discount, simply a name.)
amount (This will be a float value. This amount value will be used to calculate the discount value)
start_date ( This is the date from when the discount will be started.)
end_date (On this date, the discount will end)
        max_amount (Admin can set the highest amount of discount, e.g. a 30% discount up to 50 taka. Here, the max amount is 50 taka.)
product_id or group_product_id ( product_id or group_product_id, on which the discount will be performed must need to provide.)
'''
# Demo Values
try:
specific_values = discount_product.objects.get(product_id=product_id)
except:
return Response({'message': 'This value does not exist'})
if(request.method == "GET"):
discount_serializer_value = ProductDiscountSerializer(
specific_values, many=False)
return Response(discount_serializer_value.data)
elif(request.method == "POST"):
try:
discount_serializer_value = ProductDiscountSerializer(
specific_values, data=request.data)
if(discount_serializer_value.is_valid()):
discount_serializer_value.save()
return Response(discount_serializer_value.data, status=status.HTTP_201_CREATED)
return Response(discount_serializer_value.errors)
except:
return Response({'message': 'Discount value could not be updated'})
@api_view(['POST', 'GET'])
def delete_discount_value(request, product_id):
'''
    This API is for deleting a particular discount value. Based on the provided product_id or group_product_id, it will delete the discount value.
    Calling http://127.0.0.1:8000/discount/discount_delete/4 will invoke this API. After deleting the value, this API will
    respond with a success message. If it cannot delete, it will return an error message.
    params : product_id
'''
try:
specific_values = discount_product.objects.get(product_id=product_id)
except:
return Response({'message': 'There is no value to delete'})
if request.method == 'POST':
specific_values.delete()
return Response({'message': ' Value is successfully deleted'}, status=status.HTTP_204_NO_CONTENT)
@api_view(["GET", "POST"])
def get_product_lists(request, order_id):
if(request.method == "GET"):
try:
ware_house = []
shops = []
order_info = OrderDetails.objects.filter(order_id=order_id)
print(order_info)
for orders in order_info:
all_specification = ProductSpecification.objects.get(
product_id=orders.product_id, size=orders.product_size, color=orders.product_color)
print(all_specification)
ware_house_info = Warehouse.objects.filter(
specification_id=all_specification.id)
if ware_house_info:
                ware_house_data = WarehouseSerializer(
                    ware_house_info, many=True)
ware_house.append(ware_house_data.data)
shop_info = Shop.objects.filter(
specification_id=all_specification.id)
if shop_info.exists():
shop_data = ShopSerializer(shop_info, many=True)
shops.append(shop_data.data)
except:
return Response({'Message': 'Check whether requested data exists or not'})
return Response({
"success": True,
"Message": "Data is shown bellow",
"warehouse": ware_house,
"Shop": shops
})
@api_view(["GET", ])
def get_inventory_lists(request, order_details_id):
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
product = None
if product:
product_id = product.product_id
product_size = product.product_size
product_color = product.product_color
try:
spec = ProductSpecification.objects.get(
product_id=product_id, size=product_size, color=product_color)
except:
spec = None
if spec:
specification_id = spec.id
try:
warehouses = Warehouse.objects.filter(
specification_id=specification_id)
except:
warehouses = None
if warehouses:
                warehouses_serializer = WarehouseSerializer(
                    warehouses, many=True)
warehouse_data = warehouses_serializer.data
else:
warehouse_data = []
            try:
                shops = Shop.objects.filter(
                    specification_id=specification_id)
            except:
                shops = None
            if shops:
                shop_serializer = ShopSerializer(shops, many=True)
                shop_data = shop_serializer.data
            else:
                shop_data = []
else:
warehouse_data = []
shop_data = []
else:
warehouse_data = []
shop_data = []
return JsonResponse({'success': True, 'message': 'Data is shown below', 'warehouse_data': warehouse_data, 'shop_data': shop_data})
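
# get_inventory_lists response sketch (illustrative values): every stock
# location holding the ordered item's specification is returned, e.g.
#
#   {"success": true, "message": "Data is shown below",
#    "warehouse_data": [{"id": 1, "quantity": 40, ...}],
#    "shop_data": [{"id": 2, "quantity": 5, ...}]}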
@api_view(["POST", ])
def subtract_quantity(request, order_details_id):
warehouse_id = request.data.get("warehouse_id")
shop_id = request.data.get("shop_id")
quantity = request.data.get("quantity")
quantity = int(quantity)
if warehouse_id is None:
inventory_id = shop_id
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
product = None
if product:
item_quantity = product.total_quantity
item_remaining = product.remaining
if item_remaining > 0:
# make the subtraction
check = item_remaining - int(quantity)
if check >= 0:
print("quantity thik dise")
product.remaining -= quantity
product.save()
item_remaining = product.remaining
item_quantity = product.quantity
try:
shop = Shop.objects.get(id=shop_id)
except:
shop = None
if shop:
shop.product_quantity -= quantity
shop.save()
shop_serializer = ShopSerializer(shop, many=False)
shop_data = shop_serializer.data
else:
shop_data = {}
return JsonResponse({'success': True, 'message': 'The amount has been subtracted', 'remaining': item_remaining, 'quantity': item_quantity, 'shop_data': shop_data})
else:
print("quantity thik dey nai")
return JsonResponse({'success': False, 'message': 'Enter the correct quantity', 'remaining': item_remaining, 'quantity': item_quantity})
else:
print("item nai ar")
return JsonResponse({'success': False, 'message': 'The items quantity has already been subtracted'})
else:
print("product nai")
return JsonResponse({'success': False, 'message': 'The item does not exist'})
elif shop_id is None:
print("warehouse ase")
inventory_id = warehouse_id
print(inventory_id)
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
product = None
if product:
item_quantity = product.total_quantity
item_remaining = product.remaining
if item_remaining > 0:
# make the subtraction
check = item_remaining - quantity
if check >= 0:
print("quantity thik dise")
product.remaining -= quantity
product.save()
item_remaining = product.remaining
item_quantity = product.quantity
try:
warehouse = Warehouse.objects.get(id=warehouse_id)
except:
warehouse = None
if warehouse:
warehouse.product_quantity -= quantity
warehouse.save()
                        warehouse_serializer = WarehouseSerializer(
                            warehouse, many=False)
warehouse_data = warehouse_serializer.data
else:
warehouse_data = {}
return JsonResponse({'success': True, 'message': 'The amount has been subtracted', 'remaining': item_remaining, 'quantity': item_quantity, 'warehouse_data': warehouse_data})
else:
print("quantity thik dey nai")
return JsonResponse({'success': False, 'message': 'Enter the correct quantity', 'remaining': item_remaining, 'quantity': item_quantity})
else:
print("product er item nai")
return JsonResponse({'success': False, 'message': 'The items quantity has already been subtracted'})
else:
print("item tai nai")
return JsonResponse({'success': False, 'message': 'The item does not exist'})
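
# Example payloads for subtract_quantity (illustrative ids/quantities). Exactly
# one of warehouse_id / shop_id is expected; the other must be null:
#
#   {"shop_id": 5, "warehouse_id": null, "quantity": 2}   # shop branch
#   {"warehouse_id": 3, "shop_id": null, "quantity": 2}   # warehouse branch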
@api_view(["POST", ])
def subtract_items(request, order_details_id):
# data= {"warehouse": [
# {
# "id": 1,
# "name": "WarehouseA",
# "location": "Dhanmondi",
# "subtract": 10
# },
# {
# "id": 2,
# "name": "WarehouseB",
# "location": "Gulshan",
# "subtract": 10
# }
# ],
# "shop": [
# {
# "id": 1,
# "name": "ShopB",
# "location": "gulshan",
# "subtract": 10
# },
# {
# "id": 2,
# "name": "ShopA",
# "location": "Banani",
# "subtract": 10
# }
# ]
# }
data = request.data
current_date = date.today()
print(data)
# print(data["warehouse"])
# print(len(data["warehouse"]))
# print(data["shop"])
# print(len(data["warehouse"]))
# print(data["warehouse"][0]["warehouse_id"])
warehouse_data = data["warehouse"]
shop_data = data["shop"]
# print(warehouse_data)
# print(len(warehouse_data))
# print(warehouse_data[1]["warehouse_id"])
# This is for the warehouse data
try:
item = OrderDetails.objects.get(id=order_details_id)
except:
item = None
if item:
# Checking if any item has been subtracted from the warehouse
item_remaining = item.remaining
item_product_id = item.product_id
item_color = item.product_color
item_size = item.product_size
item_weight = item.product_weight
item_unit = item.product_unit
product_id = item.product_id
specification_id = item.specification_id
order_id = item.order_id
print(item_remaining)
try:
spec = ProductSpecification.objects.get(id=specification_id)
except:
spec = None
if spec:
specification_id = spec.id
else:
specification_id = 0
#Fetching the purchase price and selling price
try:
price = ProductPrice.objects.filter(specification_id=specification_id).last()
except:
price = None
print(price)
if price:
if price.price:
selling_price = price.price
else:
selling_price = 0.0
if price.purchase_price:
purchase_price = price.purchase_price
else:
purchase_price = 0.0
else:
selling_price = 0.0
purchase_price = 0.0
print(purchase_price)
print(selling_price)
        if len(warehouse_data) > 0:
            # looping through the warehouse items
            for i in range(len(warehouse_data)):
if item_remaining > 0:
# fetch the warehouseinfo
warehouse_id = warehouse_data[i]["id"]
subtract = int(warehouse_data[i]["subtract"])
try:
warehouse_info = WarehouseInfo.objects.filter(
warehouse_id=warehouse_id, specification_id=specification_id).last()
except:
warehouse_info = None
if warehouse_info:
if warehouse_info.quantity >= subtract:
warehouse_info.quantity -= subtract
warehouse_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
#make the entries in the tracking table
tracking_table = subtraction_track.objects.create(specification_id=specification_id,order_id=order_id,warehouse_id=warehouse_id,debit_quantity=subtract,date=current_date)
tracking_table.save()
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.requested += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date,requested=subtract,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
if item_remaining == 0:
item.admin_status = "Approved"
item.save()
item_serializer = OrderDetailsSerializer(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "This product is approved", "data": data})
else:
return JsonResponse({"success": False, "message": "The warehouse does not have enough of this item"})
else:
return JsonResponse({"success": False, "message": "The warehouse does not have enough of this item"})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist in this order"})
else:
pass
        if len(shop_data) > 0:
            # looping through the shop items
            for i in range(len(shop_data)):
print("loop er moddhe dhuklam")
if item_remaining > 0:
print("shop item_remaining ase")
# fetch the warehouseinfo
shop_id = shop_data[i]["id"]
subtract = int(shop_data[i]["subtract"])
try:
shop_info = ShopInfo.objects.filter(
shop_id=shop_id, specification_id=specification_id).last()
except:
shop_info = None
if shop_info:
if shop_info.quantity >= subtract:
shop_info.quantity -= subtract
shop_info.save()
print("shoper aager")
print(item_remaining)
item.remaining -= subtract
item.save()
item_remaining = item.remaining
print("shop er porer")
print(item_remaining)
#Inserting the track infos
tracking_table = subtraction_track.objects.create(specification_id=specification_id,order_id=order_id,shop_id=shop_id,debit_quantity=subtract,date=current_date)
tracking_table.save()
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.requested += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date,requested=subtract,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
if item_remaining == 0:
item.admin_status = "Approved"
item.save()
item_serializer = OrderDetailsSerializer(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "This product is approved", "data": data})
return JsonResponse({"success": True, "message": "This product is approved"})
else:
return JsonResponse({"success": False, "message": "The shop does not have enough of this item"})
else:
return JsonResponse({"success": False, "message": "The shop does not have enough of this item"})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist in this order"})
else:
pass
else:
        return JsonResponse(
            {"success": False, "message": "The item is not in that order"})
@api_view(["POST", ])
def subtract_spec_quantity(request, specification_id):
print("specification_id")
print(specification_id)
# data= {"warehouse": [
# {
# "warehouse_id": 1,
# "name": "WarehouseA",
# "location": "Dhanmondi",
# "subtract": 5
# },
# {
# "warehouse_id": 2,
# "name": "WarehouseB",
# "location": "Gulshan",
# "subtract": 3
# }
# ],
# "shop": [
# {
# "shop_id": 1,
# "name": "ShopB",
# "location": "gulshan",
# "subtract": 2
# },
# {
# "shop_id": 2,
# "name": "ShopA",
# "location": "Banani",
# "subtract": 1
# }
# ]
# }
data = request.data
current_date = date.today()
print(data)
# print(data["warehouse"])
# print(len(data["warehouse"]))
# print(data["shop"])
# print(len(data["warehouse"]))
# print(data["warehouse"][0]["warehouse_id"])
warehouse_data = data["warehouse"]
shop_data = data["shop"]
# print(warehouse_data)
# print(len(warehouse_data))
# print(warehouse_data[1]["warehouse_id"])
# This is for the warehouse data
try:
item = ProductSpecification.objects.get(id=specification_id)
except:
item = None
    print('item')
    print(item)
if item:
# Checking if any item has been subtracted from the warehouse
item_remaining = item.remaining
# item_product_id = item.product_id
# item_color = item.product_color
# item_size = item.product_size
# item_weight = item.product_weight
# item_unit = item.product_unit
product_id = item.product_id
# specification_id = item.specification_id
# try:
# spec = ProductSpecification.objects.get(id=specification_id)
# except:
# spec = None
# if spec:
# specification_id = spec.id
# else:
# specification_id = 0
print(item_remaining)
        if len(warehouse_data) > 0:
            # looping through the warehouse items
            for i in range(len(warehouse_data)):
if item_remaining > 0:
# fetch the warehouseinfo
warehouse_id = warehouse_data[i]["warehouse_id"]
subtract = int(warehouse_data[i]["subtract"])
#Checking if warehouse exists
try:
warehouse_info = WarehouseInfo.objects.get(
warehouse_id=warehouse_id, specification_id=specification_id)
except:
warehouse_info = None
if warehouse_info:
warehouse_info.quantity += subtract
warehouse_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date,credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been subtracted", "data": data})
else:
#Create a new warehouse
warehouse_info = WarehouseInfo.objects.create(product_id=product_id,warehouse_id=warehouse_id,specification_id=specification_id,quantity=subtract)
warehouse_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
new_report = inventory_report.objects.create(product_id= product_id,specification_id= specification_id,warehouse_id=warehouse_id,date=current_date,credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been added", "data": data})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist"})
else:
pass
        if len(shop_data) > 0:
            # looping through the shop items
            for i in range(len(shop_data)):
if item_remaining > 0:
# fetch the warehouseinfo
shop_id = shop_data[i]["shop_id"]
subtract = int(shop_data[i]["subtract"])
#Checking if warehouse exists
try:
shop_info = ShopInfo.objects.get(
shop_id=shop_id, specification_id=specification_id)
except:
shop_info = None
if shop_info:
shop_info.quantity += subtract
shop_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
#make the transaction entries
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
                        new_report = inventory_report.objects.create(product_id=product_id, specification_id=specification_id, shop_id=shop_id, date=current_date, credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been subtracted", "data": data})
                    else:
                        # Create a new shop record
                        shop_info = ShopInfo.objects.create(product_id=product_id, shop_id=shop_id, specification_id=specification_id, quantity=subtract)
                        shop_info.save()
item.remaining -= subtract
item.save()
item_remaining = item.remaining
# try:
# report = inventory_report.objects.get(product_id= product_id,specification_id= specification_id,shop_id=shop_id,date=current_date)
# except:
# report = None
# if report:
# #Update the existing report
# report.debit += subtract
# report.save()
# else:
# #Create a new row
                        new_report = inventory_report.objects.create(product_id=product_id, specification_id=specification_id, shop_id=shop_id, date=current_date, credit=subtract)
new_report.save()
if item_remaining == 0:
# item.admin_status = "Approved"
# item.save()
item_serializer = ProductSpecificationSerializer1(
item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "All the quantities have been added", "data": data})
# elif item_remaining==0:
# return JsonResponse({"success":True,"message":"This product is approved"})
else:
return JsonResponse({"success": False, "message": "These many items dont exist"})
else:
pass
# @api_view(["POST", ])
# def admin_approval(request, order_id):
# flag = 0
# try:
# specific_order = Order.objects.get(id=order_id)
# except:
# specific_order = None
# if specific_order:
# orderid = specific_order.id
# order_details = OrderDetails.objects.filter(order_id=orderid)
# order_details_ids = list(
# order_details.values_list('id', flat=True).distinct())
# print(order_details_ids)
# for i in range(len(order_details_ids)):
# print("ashtese")
# try:
# specific_order_details = OrderDetails.objects.get(
# id=order_details_ids[i])
# except:
# specific_order_details = None
# if specific_order_details:
# remaining_items = specific_order_details.remaining
# if remaining_items != 0:
# flag = 1
# break
# else:
# flag = 0
# if flag == 0:
# specific_order.admin_status = "Confirmed"
# specific_order.save()
# # Create a invoice
# data = {'order_id': order_id}
# invoice_serializer = InvoiceSerializer(data=data)
# if invoice_serializer.is_valid():
# invoice_serializer.save()
# return JsonResponse({'success': True, 'message': 'The order has been approved'})
# else:
# return JsonResponse({'success': False, 'message': 'Please ensure where to remove the items from'})
# else:
# return JsonResponse({'success': False, 'message': 'The order does not exist'})
# @api_view(["POST",])
# def admin_approval(request,order_id):
# flag = 0
# try:
# specific_order = Order.objects.get(id=order_id)
# except:
# specific_order = None
# if specific_order:
# orderid = specific_order.id
# order_details = OrderDetails.objects.filter(order_id=orderid)
# order_details_ids = list(order_details.values_list('id',flat=True).distinct())
# print(order_details_ids)
# for i in range(len(order_details_ids)):
# print("ashtese")
# try:
# specific_order_details = OrderDetails.objects.get(id=order_details_ids[i])
# except:
# specific_order_details = None
# if specific_order_details:
# remaining_items = specific_order_details.remaining
# if remaining_items != 0 :
# flag = 1
# break
# else:
# flag = 0
# if flag == 0:
# specific_order.admin_status = "Confirmed"
# specific_order.save()
# return JsonResponse({'success':True,'message':'The order has been approved'})
# else:
# return JsonResponse({'success':False,'message':'Please ensure where to remove the items from'})
# else:
# return JsonResponse({'success':False,'message':'The order does not exist'})
# @api_view(["GET", ])
# def admin_approval(request, order_id):
# try:
# specific_order = Order.objects.get(id=order_id)
# except:
# specific_order = None
# if specific_order:
# specific_order.admin_status = "Confirmed"
# specific_order.save()
# order_serializer = OrderSerializer(specific_order, many=False)
# data = order_serializer.data
# # Create a invoice
# data = {'order_id':order_id, 'ref_invoice':0, 'is_active':True}
# invoice_serializer = InvoiceSerializer(data=data)
# if invoice_serializer.is_valid():
# invoice_serializer.save()
# return JsonResponse({"success": True, "message": "The order has been approved", "data": data})
# else:
# return JsonResponse({"success": False, "message": "This order does not exist"})
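# Confirms an order and creates its invoice. For "mother" orders the approval is also
# posted to the mother site (site_path and CompanyInfo are assumed to be configured
# elsewhere in this module); for own orders every OrderDetails row marked is_own must
# already be approved before the order is confirmed.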
@api_view(["GET", ])
def admin_approval(request, order_id):
approval_flag = True
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print("site_ud")
print(site_id)
try:
specific_order = Order.objects.get(id=order_id)
except:
specific_order = None
if specific_order:
is_mother = specific_order.is_mother
if is_mother == True:
print("mother er product")
specific_order.admin_status = "Confirmed"
specific_order.save()
order_serializer = OrderSerializer(specific_order, many=False)
order_data = order_serializer.data
main_data = {"order_data":order_data,"site_id":site_id}
print("MAIN DATA")
print(main_data)
# Create a selling invoice
data = {'order_id':order_id, 'ref_invoice':0, 'is_active':True}
invoice_serializer = InvoiceSerializer(data=data)
if invoice_serializer.is_valid():
invoice_serializer.save()
invoice_id = invoice_serializer.data["id"]
#Create a purchase invoice
spec_dataz = json.dumps(main_data)
url = site_path + "Cart/create_childsite_orders_purchase_invoice/"
headers = {'Content-Type': 'application/json',}
dataz = requests.post(url = url, headers=headers,data = spec_dataz)
                if dataz.status_code == 200:
                    dataz = dataz.json()
                    print(dataz["success"])
                    print(dataz["message"])
if dataz["success"] == True:
return JsonResponse({"success":True,'message':'Order has been approved.Mother site response was successful.Invoice has been created'})
else:
try:
specific_invoice = Invoice.objects.get(id=invoice_id)
except:
specific_invoice = None
if specific_invoice:
specific_invoice.delete()
specific_order.admin_status = "Pending"
specific_order.save()
return JsonResponse({"success": False,'message':'Order could not be approved.Mother site response was insuccessful.'})
else:
try:
specific_invoice = Invoice.objects.get(id=invoice_id)
except:
specific_invoice = None
if specific_invoice:
specific_invoice.delete()
specific_order.admin_status = "Pending"
specific_order.save()
return JsonResponse({"success": False,'message':'Order could not be approved.Mother site did not respond.'})
else:
specific_order.admin_status = "Pending"
specific_order.save()
return JsonResponse({"success":False, "message":"The order could not be approved since invoice could not be created"})
else:
try:
order_details = OrderDetails.objects.filter(order_id = order_id)
except:
order_details = None
if order_details:
order_details_ids = list(order_details.values_list('id',flat=True))
is_owns = list(order_details.values_list('is_own',flat=True))
admin_statuses = list(order_details.values_list('admin_status',flat=True))
for i in range (len(order_details_ids)):
if is_owns[i] == True:
if admin_statuses[i] == "Pending":
approval_flag = False
break
else:
pass
else:
pass
if approval_flag == True:
specific_order.admin_status = "Confirmed"
specific_order.save()
                    order_serializer = OrderSerializer(specific_order, many=False)
                    data = order_serializer.data
                    # Create an invoice (use a separate dict so `data` keeps the serialized order)
                    invoice_data = {'order_id': order_id, 'ref_invoice': 0, 'is_active': True}
                    invoice_serializer = InvoiceSerializer(data=invoice_data)
                    if invoice_serializer.is_valid():
                        invoice_serializer.save()
                    else:
                        specific_order.admin_status = "Processing"
                        specific_order.save()
                        return JsonResponse({"success": False, "message": "The order could not be approved because the invoice could not be created"})
return JsonResponse({"success": True, "message": "The order has been approved", "data": data})
                else:
                    return JsonResponse({"success": False, "message": "The order cannot be approved. There are still pending items in the order."})
            else:
                return JsonResponse({"success": False, "message": "The order cannot be approved. There are no items in this order."})
else:
return JsonResponse({"success": False, "message": "This order does not exist"})
@api_view(["GET", ])
def admin_cancellation(request, order_id):
try:
specific_order = Order.objects.get(id=order_id)
except:
specific_order = None
if specific_order:
specific_order.admin_status = "Cancelled"
specific_order.save()
order_id = specific_order.id
try:
items = OrderDetails.objects.filter(order_id=order_id)
except:
items = None
if items:
item_ids = list(items.values_list('id',flat=True).distinct())
for k in range(len(item_ids)):
try:
specific_item = OrderDetails.objects.get(id=item_ids[k])
except:
specific_item = None
if specific_item:
specific_item.admin_status = "Cancelled"
specific_item.order_status = "Cancelled"
specific_item.delivery_status = "Cancelled"
specific_item.save()
else:
pass
order_serializer = OrderSerializer(specific_order, many=False)
data = order_serializer.data
return JsonResponse({"success": True, "message": "The order has been approved", "data": data})
else:
return JsonResponse({"success": False, "message": "This order does not exist"})
@api_view(["GET", ])
def item_cancellation(request, order_details_id):
try:
item = OrderDetails.objects.get(id=order_details_id)
except:
item = None
if item:
item.admin_status = "Cancelled"
item.save()
item_serializer = OrderDetailsSerializer(item, many=False)
data = item_serializer.data
return JsonResponse({"success": True, "message": "The status has been changed", "data": data})
else:
return JsonResponse({"success": False, "message": "This item does not exist"})
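# Creates a product specification together with its price, optional discount and point,
# delivery info, and product code in one request. On an exception, the rows created so
# far are deleted by hand (a best-effort rollback; a database transaction would be the
# sturdier choice).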
@api_view(['POST', ])
def add_spec2(request, product_id):
current_date = date.today()
specification_data_value = {
'product_id': product_id,
'color': request.data.get("color"),
'size': request.data.get("size"),
'weight': request.data.get("weight"),
'warranty': request.data.get("warranty"),
'warranty_unit': request.data.get("warranty_unit"),
'unit': request.data.get("product_unit"),
'vat': request.data.get("vat"),
'seller_quantity': request.data.get("seller_quantity"),
'remaining': request.data.get("seller_quantity"),
'manufacture_date': request.data.get("manufacture_date"),
'expire': request.data.get("expire"),
        'is_own': True
}
product_price = {
'product_id': product_id,
'price': request.data.get("price"),
'purchase_price': request.data.get("purchase_price"),
# 'currency_id': request.data.get('currency_id')
}
discount_type = request.data.get("discount_type")
discount_amount = request.data.get("discount_amount")
discount_start_date = request.data.get("discount_start_date")
discount_end_date = request.data.get("discount_end_date")
point_amount = request.data.get("point_amount")
point_start_date = request.data.get("point_start_date")
point_end_date = request.data.get("point_end_date")
if discount_type == "none" or discount_amount == "" or discount_start_date == "" or discount_end_date == "":
discount_flag = False
else:
discount_flag = True
if point_amount == "" or point_start_date == "" or point_end_date == "":
point_flag = False
else:
point_flag = True
product_discount = {
'product_id': product_id,
'amount': request.data.get("discount_amount"),
'discount_type': request.data.get("discount_type"),
'start_date': request.data.get("discount_start_date"),
# 'end_date' : data['discount_end_date']
'end_date': request.data.get("discount_end_date")
}
product_point = {
'product_id': product_id,
'point': request.data.get("point_amount"),
# 'end_date': data['point_end_date']
'start_date': request.data.get("point_start_date"),
'end_date': request.data.get("point_end_date")
}
delivery_info = {
'height': request.data.get("delivery_height"),
'width': request.data.get("delivery_width"),
'length': request.data.get("delivery_length"),
'weight': request.data.get("delivery_weight"),
'measument_unit': request.data.get("delivery_product_unit"),
'charge_inside': request.data.get("delivery_inside_city_charge"),
'charge_outside': request.data.get("delivery_outside_city_charge"),
}
product_code = {
'product_id': product_id,
'manual_SKU' : request.data.get("SKU")
}
if request.method == 'POST':
        delivery_id = 0
        discount_id = 0
        point_id = 0
        price_id = 0
        specification_id = 0
        code_id = 0
        flag = 0
spec = {}
price = {}
discount = {}
point = {}
delivery = {}
code={}
try:
product_spec = ProductSpecificationSerializerz(
data=specification_data_value)
if product_spec.is_valid():
product_spec.save()
# print("888888888888888888 spec save hoise")
spec.update(product_spec.data)
# print("Specification_id", spec["id"])
specification_id = spec["id"]
else:
# print(product_spec.errors)
specification_id = 0
flag = flag+1
product_price.update({'specification_id': spec['id']})
product_price = ProductPriceSerializer(data=product_price)
if product_price.is_valid():
product_price.save()
# print("price save hochche")
price.update(product_price.data)
price_id = price["id"]
else:
price_id = 0
flag = flag+1
if discount_flag == False:
discount = {}
else:
product_discount.update({'specification_id': spec['id']})
# print("product_discount")
# print(product_discount)
product_dis = ProductDiscountSerializer(data=product_discount)
if product_dis.is_valid():
product_dis.save()
# print("savwe hochche")
discount.update(product_dis.data)
discount_id = discount["id"]
else:
discount_id = 0
flag = flag+1
if point_flag == False:
point = {}
else:
product_point.update({'specification_id': spec['id']})
product_point_value = ProductPointSerializer(
data=product_point)
if product_point_value.is_valid():
product_point_value.save()
# print("point save")
point.update(product_point_value.data)
point_id = point["id"]
else:
point_id = 0
# print(product_point_value.errors)
flag = flag+1
delivery_info.update({'specification_id': spec['id']})
# print("here delivery",delivery_info )
delivery_value = DeliveryInfoSerializer(data=delivery_info)
# print("serializer",delivery_value)
if delivery_value.is_valid():
# print("Inside the delivery ")
delivery_value.save()
# print("delivery is saved")
delivery.update(delivery_value.data)
delivery_id = delivery["id"]
else:
delivery_id = 0
# print("errors delivery " ,delivery_value.errors)
flag = flag+1
product_code.update({'specification_id':spec['id']})
# print("product point",product_code )
            product_code_value = ProductCodeSerializer(data=product_code)
# print("product code serial", product_code_value)
# print("before validation")
if product_code_value.is_valid():
# print("inside validation")
product_code_value.save()
# print("code is saved", product_code_value.data)
code.update(product_code_value.data)
# print("update code info",code )
create_product_code(product_code)
code_id = code["id"]
# print("code id", code_id)
else:
# print("code error", product_code_value.errors)
flag= flag+1
if flag > 0:
# print("xxxxxxxxxxxxxxx")
return JsonResponse({
"success": False,
"message": "Something went wrong !!",
})
else:
return JsonResponse({
"success": True,
"message": "Specification data has been inserted Successfully",
"specification": spec,
"price": price,
"discount": discount,
"point": point,
"delivery": delivery
})
except:
try:
spe = ProductSpecification.objects.get(id=specification_id)
except:
spe = None
if spe:
spe.delete()
try:
pri = ProductPrice.objects.get(id=price_id)
except:
pri = None
if pri:
pri.delete()
try:
poi = ProductPoint.objects.get(id=point_id)
except:
poi = None
if poi:
poi.delete()
try:
dis = discount_product.objects.get(id=discount_id)
except:
dis = None
if dis:
dis.delete()
try:
deli = DeliveryInfo.objects.get(id=delivery_id)
except:
deli = None
if deli:
deli.delete()
try:
deli = ProductCode.objects.get(id=code_id)
except:
deli = None
if deli:
deli.delete()
return JsonResponse({
"success": False,
"message": "Something went wrong !!"
})
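# Demo/prototype endpoint: `values` below is a hardcoded sample payload and request.data
# is never read, so this view appears to be kept for reference only.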
@api_view(["GET", "POST"])
def confirm_products(request):
values = {
"order_id": 1,
"quantity": 2000000,
"store": "warehouse",
"ware_name": "sheba.xyz",
"ware_house_id": 1
}
if(request.method == "POST"):
ware_house = []
shops = []
flag = 0
reminder = -1
try:
order_info = OrderDetails.objects.filter(
order_id=values['order_id'])
for orders in order_info:
all_quantity_data = OrderDetails.objects.get(
product_id=orders.product_id, product_size=orders.product_size, product_color=orders.product_color)
specific_quantity = all_quantity_data.total_quantity
if(values['quantity'] > specific_quantity):
flag = flag+1
else:
print("specific quantity", specific_quantity)
if (values['store'] == "warehouse"):
ware_house_info = Warehouse.objects.get(
id=values['ware_house_id'])
quantity = ware_house_info.product_quantity
if(values['quantity'] > quantity):
flag = flag+1
else:
print("before add", ware_house_info.product_quantity)
ware_house_info.product_quantity = (
quantity - values['quantity'])
ware_house_info.save()
print("after add", ware_house_info.product_quantity)
reminder = specific_quantity-values['quantity']
elif (values['store'] == "shop"):
shop_house_info = Shop.objects.get(
id=values['ware_house_id'])
quantity = shop_house_info.product_quantity
if(values['quantity'] > quantity):
flag = flag+1
else:
shop_house_info.product_quantity = (
quantity - values['quantity'])
shop_house_info.save()
reminder = specific_quantity-values['quantity']
if(reminder < 0):
reminder = 0
except:
return Response({'Message': 'Check whether requested data exists or not'})
if (flag > 0):
return Response({
"success": False,
"Message": "You set wrong values !!"
})
else:
return Response({
"success": True,
"Message": "Information has been updated",
"reminder": reminder
})
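# ----------------------------------- warehouse/shop management ------------------------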
@api_view(["POST", ])
def create_warehouse(request):
serializer = WarehouseSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Warehouse has been created", "data": serializer.data})
else:
return Response({"success": True, "message": "Warehouse could not be created"})
@api_view(["POST", ])
def create_shop(request):
serializer = ShopSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Shop has been created", "data": serializer.data})
else:
return Response({"success": True, "message": "Shop could not be created"})
@api_view(["POST", ])
def update_shop(request, shop_id):
try:
shop = Shop.objects.get(id=shop_id)
except:
shop = None
if shop:
serializer = ShopSerializer(shop, data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Shop data has been updated", "data": serializer.data})
        else:
            return Response({"success": False, "message": "Shop data could not be updated"})
    else:
        return Response({"success": False, "message": "Shop does not exist"})
@api_view(["POST", ])
def update_warehouse(request, warehouse_id):
try:
warehouse = Warehouse.objects.get(id=warehouse_id)
except:
warehouse = None
if warehouse:
serializer = WarehouseSerializer(warehouse, data=request.data)
if serializer.is_valid():
serializer.save()
return Response({"success": True, "message": "Warehouse data has been updated", "data": serializer.data})
        else:
            return Response({"success": False, "message": "Warehouse data could not be updated"})
    else:
        return Response({"success": False, "message": "Warehouse does not exist"})
@api_view(["GET", ])
def show_all_warehouses(request):
try:
warehouse = Warehouse.objects.all()
except:
warehouse = None
if warehouse:
serializer = WarehouseSerializer(warehouse, many=True)
return Response({"success": True, "message": "Data is shown", "data": serializer.data})
else:
return Response({"success": False, "message": "No data could be retrieved", "data": []})
@api_view(["GET", ])
def show_all_shops(request):
    try:
        shops = Shop.objects.all()
    except:
        shops = None
    if shops:
        serializer = ShopSerializer(shops, many=True)
return Response({"success": True, "message": "Data is shown", "data": serializer.data})
else:
return Response({"success": False, "message": "No data could be retrieved", "data": []})
def delete_warehouse(request, warehouse_id):
try:
warehouse = Warehouse.objects.get(id=warehouse_id)
except:
warehouse = None
if warehouse:
warehouse.delete()
return JsonResponse({"success": True, "message": "Warehouse has been deleted"})
else:
return JsonResponse({"success": False, "message": "Warehouse does not exist"})
def delete_shop(request, shop_id):
    try:
        shop = Shop.objects.get(id=shop_id)
    except:
        shop = None
    if shop:
        shop.delete()
return JsonResponse({"success": True, "message": "Shop has been deleted"})
else:
return JsonResponse({"success": False, "message": "Shop does not exist"})
@api_view(["GET", ])
def inventory_lists(request, order_details_id):
try:
product = OrderDetails.objects.get(id=order_details_id)
except:
        product = None
    # Initialise defaults so the final response is well-defined even if the lookup fails
    warehouse_infos = []
    shop_infos = []
    print(product)
    if product:
product_id = product.product_id
product_size = product.product_size
product_color = product.product_color
product_specification_id = product.specification_id
try:
spec = ProductSpecification.objects.get(id=product_specification_id)
except:
spec = None
if spec:
specification_id = spec.id
print(specification_id)
try:
warehouses = WarehouseInfo.objects.filter(
specification_id=specification_id)
except:
warehouses = None
print(warehouses)
warehouse_infos = []
if warehouses:
warehouse_ids = list(
warehouses.values_list('warehouse_id', flat=True))
warehouse_quantities = list(
warehouses.values_list('quantity', flat=True))
for i in range(len(warehouse_ids)):
try:
warehouse = Warehouse.objects.get(id=warehouse_ids[i])
except:
warehouse = None
if warehouse:
name = warehouse.warehouse_name
location = warehouse.warehouse_location
quantity = warehouse_quantities[i]
warehouse_data = {
"id": warehouse_ids[i], "name": name, "location": location, "quantity": quantity}
else:
warehouse_data = {}
warehouse_infos.append(warehouse_data)
else:
warehouse_infos = []
try:
shops = ShopInfo.objects.filter(
specification_id=specification_id)
except:
shops = None
shop_infos = []
if shops:
shop_ids = list(shops.values_list('shop_id', flat=True))
shop_quantities = list(
shops.values_list('quantity', flat=True))
for i in range(len(shop_ids)):
try:
shop = Shop.objects.get(id=shop_ids[i])
except:
shop = None
                if shop:
name = shop.shop_name
location = shop.shop_location
quantity = shop_quantities[i]
shop_data = {
"id": shop_ids[i], "name": name, "location": location, "quantity": quantity}
else:
shop_data = {}
shop_infos.append(shop_data)
else:
shop_infos = []
else:
warehouse_infos = []
shop_infos = []
return JsonResponse({'success': True, 'message': 'Data is shown below', 'warehouse': warehouse_infos, 'shop': shop_infos})
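# Returns the serialized Warehouse record for the given id (whatever related product
# data WarehouseSerializer is configured to expose); shop_products below mirrors this
# for shops.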
@api_view(["GET", ])
def warehouse_products(request, warehouse_id):
try:
products = Warehouse.objects.get(id=warehouse_id)
except:
products = None
if products:
warehouse_serializer = WarehouseSerializer(products, many=False)
warehouse_data = warehouse_serializer.data
return JsonResponse({'success': True, 'message': 'Here is the data', 'data': warehouse_data})
    else:
        warehouse_data = {}
        return JsonResponse({'success': False, 'message': 'Warehouse does not exist', 'data': warehouse_data})
@api_view(["GET", ])
def shop_products(request, shop_id):
try:
products = Shop.objects.get(id=shop_id)
except:
products = None
if products:
warehouse_serializer = ShopSerializer(products, many=False)
warehouse_data = warehouse_serializer.data
return JsonResponse({'success': True, 'message': 'Here is the data', 'data': warehouse_data})
    else:
        warehouse_data = {}
        return JsonResponse({'success': False, 'message': 'Shop does not exist', 'data': warehouse_data})
# ----------------------------------- quantity store in different shop/inventory ------------------------
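# Receives purchase/selling prices plus per-warehouse and per-shop quantities for a
# specification (see the commented sample payload below), saves the price, credits each
# warehouse/shop record, and writes inventory_report credit rows.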
@api_view(["GET", "POST"])
def insert_product_quantity(request):
# demo values
# api_values = {
# 'product_id':35,
# 'specification_id':34,
# 'purchase_price': 100,
# 'selling_price': 120,
# 'warehouse': [
# {
# 'warehouse_id': 1,
# 'quantity': 200
# },
# {
# 'warehouse_id': 2,
# 'quantity': 200
# }
# ],
# 'shop': [
# {
# 'shop_id': 3,
# 'quantity': 200
# },
# {
# 'shop_id': 2,
# 'quantity': 200
# },
# {
# 'shop_id': 1,
# 'quantity': 200
# }
# ]
# }
api_values = request.data
current_date = date.today()
if request.method == 'POST':
#Insert the purchase price and selling price for that object:
try:
price_data = {"product_id":api_values["product_id"],"specification_id":api_values["specification_id"],"price":api_values["selling_price"],"purchase_price":api_values["purchase_price"]}
#Inserting the price
product_price_serializer = ProductPriceSerializer(data = price_data)
if product_price_serializer.is_valid():
product_price_serializer.save()
except:
return JsonResponse({"success":False,"message":"The price could not be inserted"})
try:
#Fetching the product price
prod_price = ProductPrice.objects.filter(specification_id=api_values["specification_id"]).last()
except:
prod_price = None
if prod_price:
purchase_price = prod_price.purchase_price
selling_price = prod_price.price
else:
return JsonResponse({"success":False,"message":"Price does not exist for this product"})
try:
# checking is there any warehouse data exists or not
if len(api_values['warehouse']) > 0:
for wareh in api_values['warehouse']:
try:
# getting the previous data if there is any in the similar name. If exists update the new value. if does not create new records.
wareh_query = WarehouseInfo.objects.filter(
warehouse_id=wareh['warehouse_id'], specification_id=api_values['specification_id']).last()
print("quertresult")
print(wareh_query)
if wareh_query:
# quantity_val = wareh_query[0].quantity
# new_quantity = quantity_val + wareh['quantity']
# wareh_query.update(quantity=new_quantity)
# wareh_query.save()
print("existing warehouse")
print(type(wareh['quantity']))
print(wareh_query.quantity)
warehouse_quantity = wareh_query.quantity
print(warehouse_quantity)
new_quantity = warehouse_quantity + int(wareh['quantity'])
print(new_quantity)
wareh_query.quantity = new_quantity
print(wareh_query.quantity)
wareh_query.save()
print(wareh_query.quantity)
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("else ey dhuktese")
wareh_data = WarehouseInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], warehouse_id=wareh['warehouse_id'],
quantity=int(wareh['quantity']))
wareh_data.save()
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
# updating the inventory report credit records for each ware house quantity. It will help to keep the records in future.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=wareh['quantity'], warehouse_id=wareh['warehouse_id'])
# report_data.save()
#Check to see if there are any inventory_reports
# try:
# report = inventory_report.objects.filter(product_id=api_values['product_id'],specification_id=api_values['specification_id'],warehouse_id=wareh['warehouse_id'],date=current_date).last()
# except:
# report = None
# if report:
# #Update the existing report
# report.credit += int(wareh['quantity'])
# report.save()
new_report = inventory_report.objects.create(product_id=api_values['product_id'],specification_id=api_values['specification_id'],warehouse_id=wareh['warehouse_id'],credit=int(wareh['quantity']),date=current_date,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
except:
pass
if len(api_values['shop']) > 0:
for shops in api_values['shop']:
try:
# getting the existing shop values if is there any.
print(shops['shop_id'])
shop_query = ShopInfo.objects.filter(
shop_id=shops['shop_id'], specification_id=api_values['specification_id']).last()
print(shop_query)
if shop_query:
print("shop ase")
quantity_val = shop_query.quantity
new_quantity = quantity_val + int(shops['quantity'])
# shop_query.update(quantity=new_quantity)
shop_query.quantity = new_quantity
shop_query.save()
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("shop nai")
shop_data = ShopInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], shop_id=shops['shop_id'],
quantity=int(shops['quantity']))
shop_data.save()
# Updating the report table after being inserted the quantity corresponding to credit coloumn for each shop.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=shops['quantity'], shop_id=shops['shop_id'])
# report_data.save()
try:
product_spec = ProductSpecification.objects.get(id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
new_report = inventory_report.objects.create(product_id=api_values['product_id'],specification_id=api_values['specification_id'],shop_id=shops['shop_id'],credit=int(shops['quantity']),date=current_date,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
except:
pass
return Response({
"success": True,
"message": "Data has been added successfully"
})
except:
return Response({
"success": False,
"message": "Something went wrong !!"
})
@api_view(["GET", "POST"])
def get_all_quantity_list(request, specification_id):
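    # Build the stock picture for one specification: warehouses/shops that
    # already have a WarehouseInfo/ShopInfo row report their stored quantity,
    # every other location is padded in with previous_quantity 0.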
if request.method == 'GET':
try:
warehouse_values = []
shop_values = []
warehouse_ids = []
shop_ids = []
warehouse_query = WarehouseInfo.objects.filter(
specification_id=specification_id)
print(warehouse_query)
wh_name = Warehouse.objects.all()
print(wh_name)
for wq in warehouse_query:
print(wq.warehouse_id)
warehouse_data = Warehouse.objects.get(id=wq.warehouse_id)
wh_data = {"warehouse_id": warehouse_data.id, "previous_quantity": wq.quantity,
"warehouse_name": warehouse_data.warehouse_name}
print(wh_data)
warehouse_values.append(wh_data)
warehouse_ids.append(wq.warehouse_id)
print(warehouse_values)
for warehouse in wh_name:
if warehouse.id not in warehouse_ids:
wh_data = {"warehouse_id": warehouse.id, "previous_quantity": 0,
"warehouse_name": warehouse.warehouse_name}
warehouse_values.append(wh_data)
print(warehouse_values)
shopinfo_query = ShopInfo.objects.filter(
specification_id=specification_id)
all_shops = Shop.objects.all()
print(shopinfo_query)
print(all_shops)
for shop in shopinfo_query:
shop_data = Shop.objects.get(id=shop.shop_id)
datas = {"shop_id": shop_data.id, "previous_quantity": shop.quantity,
"shop_name": shop_data.shop_name}
shop_values.append(datas)
shop_ids.append(shop.shop_id)
for shops in all_shops:
if shops.id not in shop_ids:
datas = {"shop_id": shops.id, "previous_quantity": 0,
"shop_name": shops.shop_name}
shop_values.append(datas)
return JsonResponse({
"success": True,
"message": "Data has been retrieved successfully",
"data": {
"warehouse": warehouse_values,
"shop": shop_values
}
})
except:
return JsonResponse({
"success": False,
"message": "Something went wrong"
})
# @api_view(["GET", "POST"])
# def get_all_quantity_list_and_price(request, specification_id):
# if request.method == 'GET':
# purchase_price = 0
# selling_price = 0
# try:
# spec_price = SpecificationPrice.objects.filter(specification_id = specification_id,status="Single").last()
# except:
# spec_price = None
# if spec_price:
# purchase_price = spec_price.purchase_price
# selling_price = spec_price.mrp
# try:
# warehouse_values = []
# shop_values = []
# warehouse_ids = []
# shop_ids = []
# warehouse_query = WarehouseInfo.objects.filter(
# specification_id=specification_id)
# print(warehouse_query)
# wh_name = Warehouse.objects.all()
# print(wh_name)
# for wq in warehouse_query:
# print(wq.warehouse_id)
# warehouse_data = Warehouse.objects.get(id=wq.warehouse_id)
# wh_data = {"warehouse_id": warehouse_data.id, "previous_quantity": wq.quantity,
# "warehouse_name": warehouse_data.warehouse_name}
# print(wh_data)
# warehouse_values.append(wh_data)
# warehouse_ids.append(wq.warehouse_id)
# print(warehouse_values)
# for warehouse in wh_name:
# if warehouse.id not in warehouse_ids:
# wh_data = {"warehouse_id": warehouse.id, "previous_quantity": 0,
# "warehouse_name": warehouse.warehouse_name}
# warehouse_values.append(wh_data)
# print(warehouse_values)
# shopinfo_query = ShopInfo.objects.filter(
# specification_id=specification_id)
# all_shops = Shop.objects.all()
# print(shopinfo_query)
# print(all_shops)
# for shop in shopinfo_query:
# shop_data = Shop.objects.get(id=shop.shop_id)
# datas = {"shop_id": shop_data.id, "previous_quantity": shop.quantity,
# "shop_name": shop_data.shop_name}
# shop_values.append(datas)
# shop_ids.append(shop.shop_id)
# for shops in all_shops:
# if shops.id not in shop_ids:
# datas = {"shop_id": shops.id, "previous_quantity": 0,
# "shop_name": shops.shop_name}
# shop_values.append(datas)
# return JsonResponse({
# "success": True,
# "message": "Data has been retrieved successfully",
# "data": {
# "warehouse": warehouse_values,
# "shop": shop_values ,
# "purchase_price": purchase_price,
# "selling_price" : selling_price
# }
# })
# except:
# return JsonResponse({
# "success": False,
# "message": "Something went wrong"
# })
@api_view(["GET", "POST"])
def create_all_brand(request):
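    # Create a brand unless an exact (case-sensitive) name match already
    # exists, in which case the existing brand's id is returned. Expected
    # body fields (from the code): Brand_name, Brand_owner, Brand_country.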
brand_name = request.data.get("Brand_name")
brand_owner = request.data.get("Brand_owner")
brand_country = request.data.get("Brand_country")
brand_name = brand_name.capitalize()
print(brand_name)
data = {'Brand_name':brand_name,'Brand_country':brand_country,'Brand_owner':brand_owner}
try:
brands = ProductBrand.objects.all()
except:
brands = None
flag = 0
if brands:
brand_list=list(brands.values_list('Brand_name',flat=True))
brand_ids=list(brands.values_list('id',flat=True))
for i in range(len(brand_list)):
brand_upper = brand_list[i].upper()
# print(brand_upper)
brand_lower = brand_list[i].lower()
# print(brand_lower)
if brand_name == brand_list[i]:
brand_name = brand_list[i]
brand_id = brand_ids[i]
flag = 1
break
# elif brand_name == brand_upper:
# brand_name = brand_upper
# flag = 1
# break
# elif brand_name == brand_lower:
# brand_name = brand_lower
# flag = 1
# break
message = "The brand " + brand_name + " already exists."
print(message)
if flag == 1:
return JsonResponse({'success':False,'message': message,'brand_id':brand_id})
else:
serializer = AddBrandSerializer(data=data)
if serializer.is_valid():
serializer.save()
return JsonResponse({
"success": True,
"message": "Brand has been inserted successfully",
"data": serializer.data
})
else:
serializer = AddBrandSerializer(data=data)
if serializer.is_valid():
serializer.save()
return JsonResponse({
"success": True,
"message": "Brand has been inserted successfully",
"data": serializer.data
})
@api_view(["GET", "POST"])
def get_all_brand(request):
if request.method == 'GET':
try:
brand_query = ProductBrand.objects.all()
brand_serializers = AddBrandSerializer(brand_query, many=True)
return JsonResponse({
"success": True,
"message": "Brand has been retrived successfully",
"data": brand_serializers.data
})
except:
return JsonResponse({
"success": False,
"message": "SSomething Went wrong"
})
@api_view(["GET", "POST"])
def delete_specific_brand(request, brand_id):
if request.method == 'POST':
try:
product_brand = ProductBrand.objects.get(id=brand_id)
except:
product_brand = None
if product_brand:
if product_brand.Brand_name == "Individual":
return JsonResponse({
"success": False,
"message": "You are not allowed to delete Individual Brand"})
else:
product_brand.delete()
return JsonResponse({
"success": True,
"message": "Desired Brand has been deleted successfully"})
else:
return JsonResponse({
"success": False,
"message": "Desired Brand does not exist"
})
@api_view(["GET", "POST"])
def update_specific_brand(request, brand_id):
if request.method == 'POST':
try:
product_brand = ProductBrand.objects.get(id=brand_id)
except:
product_brand = None
if product_brand:
if product_brand.Brand_name == "Individual":
return JsonResponse({
"success": False,
"message": "You are not allowed to modify Individual Brand"})
else:
brand_serializers = AddBrandSerializer(
product_brand, data=request.data)
if brand_serializers.is_valid():
brand_serializers.save()
return JsonResponse({
"success": True,
"message": "Desired Brand has been modified successfully",
"data": brand_serializers.data})
else:
return JsonResponse({
"success": False,
"message": "Desired Brand does not exist"
})
# def warehouse
@api_view(["GET",])
def warehouse_report(request):
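    # Warehouse-side inventory entries are stored with shop_id == -1, so
    # filtering on that sentinel excludes shop rows.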
try:
report = inventory_report.objects.filter(shop_id = -1)
except:
report = None
print(report)
if report:
report_serializer = InventoryReportSerializer(report,many=True)
return JsonResponse({'success':True,'message':'Data is shown','data':report_serializer.data})
else:
return JsonResponse({'success':False,'message':'Data is not shown'})
# def warehouse
@api_view(["GET",])
def shop_report(request):
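    # Mirror of warehouse_report: warehouse_id == -1 marks shop-side rows.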
try:
report = inventory_report.objects.filter(warehouse_id = -1)
except:
report = None
if report:
report_serializer = InventoryReportSerializer(report,many=True)
return JsonResponse({'success':True,'message':'Data is shown','data':report_serializer.data})
else:
return JsonResponse({'success':False,'message':'Data is not shown'})
@api_view(["GET", "POST"])
def get_subtracted_value(request, order_id,specification_id):
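    # Collect the per-warehouse and per-shop debit quantities recorded in
    # subtraction_track for one order item, padding every location without a
    # record with added_quantity 0 so the caller always gets a full grid.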
if request.method == "GET":
try:
values=[]
all_info=[]
spec_value={}
all_ware=[]
all_shop=[]
tracking_values = subtraction_track.objects.filter(order_id = order_id)
for track in tracking_values:
values.append(track.specification_id)
values = set(values)
data_values = subtraction_track.objects.filter(order_id = order_id, specification_id = specification_id)
for itenary in data_values:
ware_house={}
shop_house ={}
if itenary.warehouse_id != -1:
try:
ware_info = Warehouse.objects.get(id = itenary.warehouse_id)
ware_name = ware_info.warehouse_name
except:
ware_name = None
ware_house.update({'warehouse_id': itenary.warehouse_id, 'warehouse_name':ware_name , 'added_quantity':itenary.debit_quantity, 'date': itenary.date})
all_ware.append(ware_house)
if itenary.shop_id != -1:
try:
shop_info = Shop.objects.get(id = itenary.shop_id )
shop_name = shop_info.shop_name
except:
shop_name = None
shop_house.update({'shop_id': itenary.shop_id,'shop_name':shop_name, 'added_quantity':itenary.debit_quantity, 'date': itenary.date})
all_shop.append(shop_house)
allshops = Shop.objects.all()
shopinfos = []
for shp in all_shop:
shopinfos.append(shp['shop_id'])
for shop_val in allshops:
shop_house ={}
if shop_val.id not in shopinfos:
shop_house.update({'shop_id': shop_val.id,'shop_name':shop_val.shop_name, 'added_quantity':0, 'date': ''})
all_shop.append(shop_house)
allware = Warehouse.objects.all()
wareinfos = []
for wre in all_ware:
wareinfos.append(wre['warehouse_id'])
for ware_val in allware:
ware_house ={}
if ware_val.id not in wareinfos:
ware_house.update({'warehouse_id': ware_val.id, 'warehouse_name':ware_val.warehouse_name , 'added_quantity':0, 'date': ''})
all_ware.append(ware_house)
spec_value.update({'specification_id':specification_id,'ware_house':all_ware, 'shop_house': all_shop })
all_info.append(spec_value)
return JsonResponse({
'success':True,
'message': 'Data has been retrieved successfully',
'data': all_info
})
except:
return JsonResponse({
'success':False,
            'message': 'Something went wrong!! Data could not be retrieved successfully',
})
# def warehouse
@api_view(["GET",])
def purchase_reports(request):
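    # Aggregate inventory_report rows per specification and per distinct
    # purchase price, summing the 'requested' (debit) and 'credit' columns.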
try:
report = inventory_report.objects.all()
except:
report = None
print("report")
print(report)
    #Finding out the distinct specification ids
if report:
main_data = []
specification_ids = list(report.values_list('specification_id',flat=True).distinct())
print(specification_ids)
for i in range(len(specification_ids)):
try:
#Finding out the entries for that specification_id
reports = inventory_report.objects.filter(specification_id=specification_ids[i])
except:
reports = None
print(reports)
if reports:
#Finding out different purchase prices for that specification
different_prices = []
different_purchase_prices = list(reports.values_list('purchase_price',flat=True).distinct())
print("different purchase price")
print(different_purchase_prices)
for j in range(len(different_purchase_prices)):
try:
specific_rows = inventory_report.objects.filter(purchase_price=different_purchase_prices[j],specification_id=specification_ids[i])
except:
specific_rows = None
print("specificrows",specific_rows)
if specific_rows:
debit_sum_list = list(specific_rows.values_list('requested', flat=True))
credit_sum_list = list(specific_rows.values_list('credit', flat=True))
selling_prices = list(specific_rows.values_list('selling_price', flat=True))
inventory_ids = list(specific_rows.values_list('id', flat=True))
debit_sum = int(sum(debit_sum_list))
credit_sum = int(sum(credit_sum_list))
                    if selling_prices[0] is None:
selling_prices[0] = 0
selling_price = int(selling_prices[0])
purchase_price = different_purchase_prices[j]
try:
specific_inventory = inventory_report.objects.get(id=inventory_ids[0])
except:
specific_inventory = None
if specific_inventory:
inventory_serializer = InventoryReportSerializer(specific_inventory,many=False)
inventory_data = inventory_serializer.data
product_name = inventory_data["product_name"]
product_brand = inventory_data["product_brand"]
product_sku = inventory_data["product_sku"]
product_barcode = inventory_data["product_barcode"]
product_id = inventory_data["product_id"]
specification_id = inventory_data["specification_id"]
response_data = {"product_id":product_id,"specification_id":specification_id,"product_name":product_name,"product_sku":product_sku,"product_barcode":product_barcode,"product_brand":product_brand,"purchase_price":purchase_price,"selling_price":selling_price,"debit_sum":debit_sum,"credit_sum":credit_sum}
different_prices.append(response_data)
else:
pass
else:
pass
else:
pass
main_data.append(different_prices)
return JsonResponse({"success":True,"message":"The data is shown below","data":main_data})
else:
return JsonResponse({"success":False,"message":"The products dont exist"})
def add_delivery_data(value):
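    # option == "all" stores a single row covering every area; "manual"
    # stores one row per selected district with the matching thana ids
    # (unresolved names fall back to id -1). Example payload shape: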
# 'arrayForDelivery': [
# {
# 'selectedDistrict': 'Dhaka',
# 'selectedThana':[
# 'Banani',
# 'Gulshan',
# 'Rampura',
# 'Dhanmondi'
# ]
# },
# {
# 'selectedDistrict': 'Barishal',
# 'selectedThana':[
# 'Hizla',
# 'Muladi',
# 'Borguna',
# 'Betagi'
# ]
# }
# ]
try:
option_data = value
option = option_data['option']
spec_id = option_data['spec']
arrayForDelivery = option_data['arrayForDelivery']
delivery_saving_data = {}
if option == "all":
delivery_saving_data.update({'specification_id':spec_id })
info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
if info_serializer.is_valid():
info_serializer.save()
return "saved"
else:
return "error"
elif option == "manual":
for del_area in arrayForDelivery:
district = del_area['selectedDistrict']
all_thanas= del_area['selectedThana']
thanas_id=[]
for thana in all_thanas:
try:
location_info = DeliveryLocation.objects.get(location_name = thana)
location_id = location_info.id
thanas_id.append(location_id)
except:
location_id = -1
try:
area_info = DeliveryArea.objects.get(Area_name = district)
area_id = area_info.id
except:
area_id = -1
delivery_saving_data.update({
'specification_id':spec_id,
'is_Bangladesh': False,
'delivery_area_id': area_id,
'delivery_location_ids': thanas_id
})
info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
if info_serializer.is_valid():
info_serializer.save()
return option_data
except:
return "error"
def add_delivery_data1(value):
# 'arrayForDelivery': [
# {
# 'selectedDistrict': 'Dhaka',
# 'selectedThana':[
# 'Banani',
# 'Gulshan',
# 'Rampura',
# 'Dhanmondi'
# ]
# },
# {
# 'selectedDistrict': 'Barishal',
# 'selectedThana':[
# 'Hizla',
# 'Muladi',
# 'Borguna',
# 'Betagi'
# ]
# }
# ]
try:
print("dhuktese")
option_data = value
option = option_data['option']
spec_id = option_data['spec']
try:
previous_entry = product_delivery_area.objects.filter(specification_id=spec_id)
print(previous_entry)
except:
previous_entry = None
if previous_entry:
previous_entry.delete()
else:
pass
arrayForDelivery = option_data['arrayForDelivery']
delivery_saving_data = {}
if option == "all":
delivery_saving_data.update({'specification_id':spec_id })
info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
if info_serializer.is_valid():
info_serializer.save()
return "saved"
else:
return "error"
elif option == "manual":
for del_area in arrayForDelivery:
district = del_area['selectedDistrict']
all_thanas= del_area['selectedThana']
thanas_id=[]
for thana in all_thanas:
try:
location_info = DeliveryLocation.objects.get(location_name = thana)
location_id = location_info.id
thanas_id.append(location_id)
except:
location_id = -1
try:
area_info = DeliveryArea.objects.get(Area_name = district)
area_id = area_info.id
except:
area_id = -1
delivery_saving_data.update({
'specification_id':spec_id,
'is_Bangladesh': False,
'delivery_area_id': area_id,
'delivery_location_ids': thanas_id
})
info_serializer = ProductDeliveryAreaSerializer (data = delivery_saving_data)
if info_serializer.is_valid():
info_serializer.save()
return option_data
except:
return "error"
@api_view(['POST', ])
def add_spec(request, product_id):
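    # Create a specification together with its optional discount, point,
    # delivery-info, product-code and delivery-area records; if any piece
    # fails, everything created so far is rolled back by hand.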
current_date = date.today()
# print(request.data)
# print("user_id")
# print(request.data.get("uid"))
# print("lalalalalala")
product_status = request.data.get("publish")
if product_status:
if product_status == "Published":
product_status = "Published"
elif product_status == "Pending":
product_status = "Pending"
else:
product_status = "Published"
manufacture_date = request.data.get("manufacture_date")
expire_date = request.data.get("expire")
if manufacture_date == "" or expire_date == "":
specification_data_value = {
'product_id': product_id,
'color': request.data.get("color"),
'size': request.data.get("size"),
'weight': request.data.get("weight"),
'warranty': request.data.get("warranty"),
'warranty_unit': request.data.get("warranty_unit"),
'unit': request.data.get("product_unit"),
'vat': request.data.get("vat"),
# 'seller_quantity': request.data.get("seller_quantity"),
# 'remaining': request.data.get("seller_quantity"),
# 'manufacture_date': request.data.get("manufacture_date"),
# 'expire': request.data.get("expire"),
'admin_status': 'Confirmed',
'is_own' :True,
'specification_status': product_status
}
else:
specification_data_value = {
'product_id': product_id,
'color': request.data.get("color"),
'size': request.data.get("size"),
'weight': request.data.get("weight"),
'warranty': request.data.get("warranty"),
'warranty_unit': request.data.get("warranty_unit"),
'unit': request.data.get("product_unit"),
'vat': request.data.get("vat"),
# 'seller_quantity': request.data.get("seller_quantity"),
# 'remaining': request.data.get("seller_quantity"),
'manufacture_date': request.data.get("manufacture_date"),
'expire': request.data.get("expire"),
'admin_status': 'Confirmed',
'is_own' :True,
'specification_status': product_status
}
# product_price = {
# 'product_id': product_id,
# 'price': request.data.get("price"),
# 'purchase_price': request.data.get("purchase_price"),
# # 'currency_id': request.data.get('currency_id')
# }
discount_type = request.data.get("discount_type")
discount_amount = request.data.get("discount_amount")
discount_start_date = request.data.get("discount_start_date")
discount_end_date = request.data.get("discount_end_date")
point_amount = request.data.get("point_amount")
point_start_date = request.data.get("point_start_date")
point_end_date = request.data.get("point_end_date")
if discount_type == "none" or discount_amount == '' or discount_start_date == '' or discount_end_date == '':
discount_flag = False
else:
discount_flag = True
print(discount_flag)
if ((point_amount == "") or (point_start_date == "") or (point_end_date == "")):
point_flag = False
# print("False")
else:
point_flag = True
print(point_flag)
product_discount = {
'product_id': product_id,
'amount': request.data.get("discount_amount"),
'discount_type': request.data.get("discount_type"),
'start_date': request.data.get("discount_start_date"),
# 'end_date' : data['discount_end_date']
'end_date': request.data.get("discount_end_date")
}
print(product_discount)
product_point = {
'product_id': product_id,
'point': request.data.get("point_amount"),
# 'end_date': data['point_end_date']
'start_date': request.data.get("point_start_date"),
'end_date': request.data.get("point_end_date")
}
delivery_info = {
'height': request.data.get("delivery_height"),
'width': request.data.get("delivery_width"),
'length': request.data.get("delivery_length"),
'weight': request.data.get("delivery_weight"),
'measument_unit': request.data.get("delivery_product_unit"),
'delivery_free': request.data.get("delivery_free"),
}
print(delivery_info)
product_code = {
'product_id': product_id,
'manual_SKU' : request.data.get("SKU"),
'uid': request.data.get("uid"),
}
if request.method == 'POST':
delivery_id = 0
discount_id = 0
point_id = 0
price_id = 0
        specification_id = 0
        code_id = 0
        flag = 0
spec = {}
price = {}
discount = {}
point = {}
delivery = {}
code={}
try:
print("ashtese")
product_spec = ProductSpecificationSerializerz(
data=specification_data_value)
if product_spec.is_valid():
product_spec.save()
print("888888888888888888 spec save hoise")
spec.update(product_spec.data)
# print("Specification_id", spec["id"])
specification_id = spec["id"]
else:
print(product_spec.errors)
specification_id = 0
flag = flag+1
# product_price.update({'specification_id': spec['id']})
# product_price = ProductPriceSerializer(data=product_price)
# if product_price.is_valid():
# product_price.save()
# # print("price save hochche")
# price.update(product_price.data)
# price_id = price["id"]
# else:
# price_id = 0
# flag = flag+1
if discount_flag == False:
discount = {}
else:
product_discount.update({'specification_id': spec['id']})
print("product_discount")
print(product_discount)
product_dis = ProductDiscountSerializer(data=product_discount)
if product_dis.is_valid():
product_dis.save()
print(product_dis.errors)
# print("savwe hochche")
discount.update(product_dis.data)
discount_id = discount["id"]
else:
print(product_dis.errors)
discount_id = 0
flag = flag+1
if point_flag == False:
point = {}
else:
# print("99999999999999999999999999999")
product_point.update({'specification_id': spec['id']})
product_point_value = ProductPointSerializer(
data=product_point)
if product_point_value.is_valid():
product_point_value.save()
print("point save")
point.update(product_point_value.data)
point_id = point["id"]
else:
point_id = 0
print(product_point_value.errors)
flag = flag+1
delivery_info.update({'specification_id': spec['id']})
# print("here delivery",delivery_info )
delivery_value = DeliveryInfoSerializer(data=delivery_info)
# print("serializer",delivery_value)
if delivery_value.is_valid():
# print("Inside the delivery ")
delivery_value.save()
# print("delivery is saved")
delivery.update(delivery_value.data)
delivery_id = delivery["id"]
else:
delivery_id = 0
print("errors delivery " ,delivery_value.errors)
flag = flag+1
product_code.update({'specification_id':spec['id']})
# print("product point",product_code )
product_code_value= ProductCodeSerializer (data=product_code)
# print("product code serial", product_code_value)
# print("before validation")
if product_code_value.is_valid():
# print("inside validation")
product_code_value.save()
print("code is saved", product_code_value.data)
code.update(product_code_value.data)
create_product_code(product_code)
code_id = code["id"]
# print("code id", code_id)
else:
# print("code error", product_code_value.errors)
flag= flag+1
data_val = {
'option' : request.data.get("option"),
'spec': spec['id'],
# 'arrayForDelivery': [
# {
# 'selectedDistrict': 'Dhaka',
# 'selectedThana':[
# 'Banani',
# 'Gulshan',
# 'Rampura',
# 'Dhanmondi'
# ]
# },
# {
# 'selectedDistrict': 'Barishal',
# 'selectedThana':[
# 'Hizla',
# 'Muladi',
# 'Borguna',
# 'Betagi'
# ]
# }
# ]
'arrayForDelivery': request.data.get("arrayForDelivery")
}
# print("before calling method")
value = add_delivery_data(data_val)
if flag > 0 or value == 'error' :
try:
spe = ProductSpecification.objects.get(id=specification_id)
except:
spe = None
if spe:
spe.delete()
# try:
# pri = ProductPrice.objects.get(id=price_id)
# except:
# pri = None
# if pri:
# pri.delete()
try:
poi = ProductPoint.objects.get(id=point_id)
except:
poi = None
if poi:
poi.delete()
try:
dis = discount_product.objects.get(id=discount_id)
except:
dis = None
if dis:
dis.delete()
try:
deli = DeliveryInfo.objects.get(id=delivery_id)
except:
deli = None
if deli:
deli.delete()
try:
deli = ProductCode.objects.get(id=code_id)
except:
deli = None
if deli:
deli.delete()
return JsonResponse({
"success": False,
"message": "Something went wrong !!"
})
else:
return JsonResponse({
"success": True,
"message": "Specification data has been inserted Successfully",
"specification": spec,
"price": price,
"discount": discount,
"point": point,
"delivery": delivery
})
except:
try:
spe = ProductSpecification.objects.get(id=specification_id)
except:
spe = None
if spe:
spe.delete()
# try:
# pri = ProductPrice.objects.get(id=price_id)
# except:
# pri = None
# if pri:
# pri.delete()
try:
poi = ProductPoint.objects.get(id=point_id)
except:
poi = None
if poi:
poi.delete()
try:
dis = discount_product.objects.get(id=discount_id)
except:
dis = None
if dis:
dis.delete()
try:
deli = DeliveryInfo.objects.get(id=delivery_id)
except:
deli = None
if deli:
deli.delete()
try:
deli = ProductCode.objects.get(id=code_id)
except:
deli = None
if deli:
deli.delete()
return JsonResponse({
"success": False,
"message": "Something went wrong !!"
})
@api_view(['POST', ])
def merchant_spec(request, product_id):
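    # Merchant-side variant of add_spec: only the specification, delivery
    # info and product code are created, with the same manual rollback.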
current_date = date.today()
specification_data_value = {
'product_id': product_id,
'color': request.data.get("color"),
'size': request.data.get("size"),
'weight': request.data.get("weight"),
'warranty': request.data.get("warranty"),
'warranty_unit': request.data.get("warranty_unit"),
'unit': request.data.get("product_unit"),
'vat': request.data.get("vat"),
'manufacture_date': request.data.get("manufacture_date"),
'expire': request.data.get("expire")
}
delivery_info = {
'height': request.data.get("delivery_height"),
'width': request.data.get("delivery_width"),
'length': request.data.get("delivery_length"),
'weight': request.data.get("delivery_weight"),
'measument_unit': request.data.get("delivery_product_unit"),
}
product_code = {
'product_id': product_id,
'manual_SKU' : request.data.get("SKU"),
'uid': request.data.get("uid"),
}
if request.method == 'POST':
delivery_id = 0
discount_id = 0
point_id = 0
price_id = 0
        specification_id = 0
        code_id = 0
        flag = 0
spec = {}
delivery = {}
code={}
try:
product_spec = ProductSpecificationSerializerz(
data=specification_data_value)
if product_spec.is_valid():
product_spec.save()
spec.update(product_spec.data)
specification_id = spec["id"]
else:
specification_id = 0
flag = flag+1
delivery_info.update({'specification_id': spec['id']})
delivery_value = DeliveryInfoSerializer(data=delivery_info)
if delivery_value.is_valid():
delivery_value.save()
delivery.update(delivery_value.data)
delivery_id = delivery["id"]
else:
delivery_id = 0
flag = flag+1
product_code.update({'specification_id':spec['id']})
product_code_value= ProductCodeSerializer (data=product_code)
if product_code_value.is_valid():
product_code_value.save()
code.update(product_code_value.data)
create_product_code(product_code)
code_id = code["id"]
else:
flag= flag+1
if flag > 0:
try:
spe = ProductSpecification.objects.get(id=specification_id)
except:
spe = None
if spe:
spe.delete()
try:
deli = DeliveryInfo.objects.get(id=delivery_id)
except:
deli = None
if deli:
deli.delete()
try:
deli = ProductCode.objects.get(id=code_id)
except:
deli = None
if deli:
deli.delete()
return JsonResponse({
"success": False,
"message": "Something went wrong !!"
})
else:
return JsonResponse({
"success": True,
"message": "Specification data has been inserted Successfully",
"specification": spec,
"delivery": delivery
})
except:
try:
spe = ProductSpecification.objects.get(id=specification_id)
except:
spe = None
if spe:
spe.delete()
try:
deli = DeliveryInfo.objects.get(id=delivery_id)
except:
deli = None
if deli:
deli.delete()
try:
deli = ProductCode.objects.get(id=code_id)
except:
deli = None
if deli:
deli.delete()
return JsonResponse({
"success": False,
"message": "Something went wrong !!"
})
@api_view(['POST', ])
def merchant_spec_edit(request, specification_id):
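    # Updates are only allowed while the specification is still in
    # 'Processing' admin status; note that the failure path below deletes
    # the specification being edited.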
specification_data_value = {
'color': request.data.get("color"),
'size': request.data.get("size"),
'weight': request.data.get("weight"),
'warranty': request.data.get("warranty"),
'warranty_unit': request.data.get("warranty_unit"),
'unit': request.data.get("product_unit"),
'vat': request.data.get("vat"),
'manufacture_date': request.data.get("manufacture_date"),
'expire': request.data.get("expire")
}
delivery_info = {
'height': request.data.get("delivery_height"),
'width': request.data.get("delivery_width"),
'length': request.data.get("delivery_length"),
'weight': request.data.get("delivery_weight"),
'measument_unit': request.data.get("delivery_product_unit"),
}
if request.method == 'POST':
delivery_id = 0
flag = 0
spec = {}
delivery = {}
try:
try:
merchant_spec = ProductSpecification.objects.get(pk=specification_id, admin_status = 'Processing')
merchant_delivery = DeliveryInfo.objects.get(specification_id = specification_id)
except:
merchant_spec = None
merchant_delivery = None
if merchant_spec and merchant_delivery:
product_spec = ProductSpecificationSerializerz(merchant_spec,data=specification_data_value)
if product_spec.is_valid():
product_spec.save()
spec.update(product_spec.data)
else:
flag = flag+1
delivery_value = DeliveryInfoSerializer(merchant_delivery,data=delivery_info)
if delivery_value.is_valid():
delivery_value.save()
delivery.update(delivery_value.data)
else:
delivery_id = 0
flag = flag+1
if flag > 0:
try:
spe = ProductSpecification.objects.get(id=specification_id)
except:
spe = None
if spe:
spe.delete()
try:
deli = DeliveryInfo.objects.get(id=delivery_id)
except:
deli = None
if deli:
deli.delete()
return JsonResponse({
"success": False,
"message": "Something went wrong !!"
})
else:
return JsonResponse({
"success": True,
"message": "Specification data has been updated Successfully !!",
"specification": spec,
"delivery": delivery
})
else:
return JsonResponse({
"success": False,
"message": "Update is restriced after specification being approved/cancelled by main site !!"
})
except:
try:
spe = ProductSpecification.objects.get(id=specification_id)
except:
spe = None
if spe:
spe.delete()
try:
deli = DeliveryInfo.objects.get(id=delivery_id)
except:
deli = None
if deli:
deli.delete()
return JsonResponse({
"success": False,
"message": "Something went wrong !!"
})
@api_view(['GET', ])
def merchant_products(request,seller_id):
specification_ids = []
try:
product = Product.objects.filter(seller=seller_id)
except:
product= None
if product:
product_ids = list(product.values_list('id',flat=True))
try:
product_specs = ProductSpecification.objects.filter(product_id__in=product_ids)
except:
product_specs = None
if product_specs:
product_specs_serializer = SellerSpecificationSerializer(product_specs,many=True)
return JsonResponse({"success":True,"message":"Products are displayed","data":product_specs_serializer.data})
else:
return({"success":False,"message":"There are no products to display"})
else:
return({"success":False,"message":"There are no products to display"})
@api_view(["GET",])
def cancel_invoice(request,invoice_id):
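    # Cancelling an invoice is implemented by flipping the linked order's
    # admin_status to "Cancelled".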
try:
invoice = Invoice.objects.get(id=invoice_id)
except:
invoice = None
if invoice:
if invoice.order_id:
try:
order = Order.objects.get(id=invoice.order_id)
except:
order = None
if order:
order.admin_status = "Cancelled"
order.save()
return JsonResponse({"success":True,"message":"This invoice has been cancelled"})
else:
return JsonResponse({"success":False,"message":"This order does not exist"})
else:
return JsonResponse({"success":False,"message":"This order does not exist"})
else:
return JsonResponse({"success":False,"message":"This invoice does not exist"})
@api_view(["GET", "POST"])
def seller_insert_product_quantity(request):
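    # Approve or cancel each item of a merchant order: approved items get a
    # price row, warehouse/shop stock and inventory_report credits; the
    # order itself is confirmed if at least one item was approved, otherwise
    # it is cancelled.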
# demo values
# api_values = [{
# 'product_id':35,
# 'order_id': 111,
# 'product_status': "Approved",
# 'specification_id':87,
# 'order_details_id': 243,
# 'purchase_price': 100,
# 'selling_price': 120,
# 'warehouse': [
# {
# 'warehouse_id': 1,
# 'quantity': 200
# },
# {
# 'warehouse_id': 2,
# 'quantity': 200
# }
# ],
# 'shop': [
# {
# 'shop_id': 3,
# 'quantity': 200
# },
# {
# 'shop_id': 2,
# 'quantity': 200
# },
# {
# 'shop_id': 1,
# 'quantity': 200
# }
# ]
# },
# {
# 'product_id':35,
# 'order_id': 111,
# 'product_status': "Cancelled",
# 'specification_id':28,
# 'order_details_id': 242,
# 'purchase_price': 100,
# 'selling_price': 120,
# 'warehouse': [
# {
# 'warehouse_id': 1,
# 'quantity': 200
# },
# {
# 'warehouse_id': 2,
# 'quantity': 200
# }
# ],
# 'shop': [
# {
# 'shop_id': 3,
# 'quantity': 200
# },
# {
# 'shop_id': 2,
# 'quantity': 200
# },
# {
# 'shop_id': 1,
# 'quantity': 200
# }
# ]
# },
# {
# 'product_id':35,
# 'order_id': 111,
# 'product_status': "Cancelled",
# 'specification_id':45,
# 'order_details_id': 244,
# }]
api_values = request.data
current_date = date.today()
quantity_flag = 0
order_id = api_values[0]["order_id"]
print(order_id)
if request.method == 'POST':
data_length = int(len(api_values))
main_flag = False
try:
for i in range(data_length):
print(i)
print(api_values[i]["product_status"])
if api_values[i]["product_status"] == "Approved":
print("approve hoise")
api_valuess = api_values[i]
mflag = False
pflag = admin_approve_add_merchant_specification(api_valuess)
print("pflag")
print(pflag)
#pflag = True
if pflag == True:
#Insert the purchase price and selling price for that object:
try:
print(api_valuess["product_id"])
print(api_valuess["specification_id"])
print(int(api_valuess["unit_price"]))
#print(int(api_valuess["purchase_price"]))
price_data = {"product_id":api_valuess["product_id"],"specification_id":api_valuess["specification_id"],"price":int(api_valuess["selling_price"]),"purchase_price":int(api_valuess["unit_price"])}
print(price_data)
#Inserting the price
product_price_serializer = ProductPriceSerializer(data = price_data)
if product_price_serializer.is_valid():
product_price_serializer.save()
print(i)
print("price saved")
else:
print(product_price_serializer.errors)
except:
return JsonResponse({"success":False,"message":"The price could not be inserted"})
try:
#Fetching the product price
prod_price = ProductPrice.objects.filter(specification_id=int(api_valuess["specification_id"])).last()
except:
prod_price = None
if prod_price:
print(i)
print("price ase")
purchase_price = prod_price.purchase_price
selling_price = prod_price.price
else:
return JsonResponse({"success":False,"message":"Price does not exist for this product"})
try:
# checking is there any warehouse data exists or not
if len(api_valuess['warehouse']) > 0:
for wareh in api_valuess['warehouse']:
try:
# getting the previous data if there is any in the similar name. If exists update the new value. if does not create new records.
wareh_query = WarehouseInfo.objects.filter(
warehouse_id=int(wareh['warehouse_id']), specification_id=int(api_valuess['specification_id'])).last()
print("quertresult")
print(wareh_query)
if wareh_query:
# quantity_val = wareh_query[0].quantity
# new_quantity = quantity_val + wareh['quantity']
# wareh_query.update(quantity=new_quantity)
# wareh_query.save()
print("existing warehouse")
print(type(wareh['quantity']))
print(wareh_query.quantity)
warehouse_quantity = wareh_query.quantity
print(warehouse_quantity)
new_quantity = warehouse_quantity + int(wareh['quantity'])
print(new_quantity)
wareh_query.quantity = new_quantity
print(wareh_query.quantity)
wareh_query.save()
print(wareh_query.quantity)
try:
product_spec = ProductSpecification.objects.get(id=int(api_valuess['specification_id']))
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("else ey dhuktese")
wareh_data = WarehouseInfo.objects.create(specification_id=int(api_valuess['specification_id']), product_id=int(api_valuess['product_id']), warehouse_id=int(wareh['warehouse_id']),
quantity=int(wareh['quantity']))
wareh_data.save()
try:
product_spec = ProductSpecification.objects.get(id=int(api_valuess['specification_id']))
except:
product_spec = None
if product_spec:
product_spec.save()
# updating the inventory report credit records for each ware house quantity. It will help to keep the records in future.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=wareh['quantity'], warehouse_id=wareh['warehouse_id'])
# report_data.save()
#Check to see if there are any inventory_reports
# try:
# report = inventory_report.objects.filter(product_id=api_values['product_id'],specification_id=api_values['specification_id'],warehouse_id=wareh['warehouse_id'],date=current_date).last()
# except:
# report = None
# if report:
# #Update the existing report
# report.credit += int(wareh['quantity'])
# report.save()
new_report = inventory_report.objects.create(product_id=int(api_valuess['product_id']),specification_id=int(api_valuess['specification_id']),warehouse_id=int(wareh['warehouse_id']),credit=int(wareh['quantity']),date=current_date,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
except:
pass
if len(api_valuess['shop']) > 0:
for shops in api_valuess['shop']:
try:
# getting the existing shop values if is there any.
print(shops['shop_id'])
shop_query = ShopInfo.objects.filter(
                                            shop_id=int(shops['shop_id']), specification_id=int(api_valuess['specification_id'])).last()
print(shop_query)
if shop_query:
print("shop ase")
quantity_val = shop_query.quantity
new_quantity = quantity_val + int(shops['quantity'])
# shop_query.update(quantity=new_quantity)
shop_query.quantity = new_quantity
shop_query.save()
try:
                                                product_spec = ProductSpecification.objects.get(id=int(api_valuess['specification_id']))
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("shop nai")
shop_data = ShopInfo.objects.create(specification_id=int(api_valuess['specification_id']), product_id=int(api_valuess['product_id']), shop_id=int(shops['shop_id']),
quantity=int(shops['quantity']))
shop_data.save()
# Updating the report table after being inserted the quantity corresponding to credit coloumn for each shop.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=shops['quantity'], shop_id=shops['shop_id'])
# report_data.save()
try:
product_spec = ProductSpecification.objects.get(id=int(api_valuess['specification_id']))
except:
product_spec = None
if product_spec:
product_spec.save()
new_report = inventory_report.objects.create(product_id=int(api_valuess['product_id']),specification_id=int(api_valuess['specification_id']),shop_id=int(shops['shop_id']),credit=int(shops['quantity']),date=current_date,purchase_price=purchase_price,selling_price=selling_price)
new_report.save()
except:
pass
mflag = True
# return Response({
# "success": True,
# "message": "Data has been added successfully"
# })
except:
# return Response({
# "success": False,
# "message": "Something went wrong !!"
# })
mflag = False
print(i)
print(mflag)
if mflag == True:
try:
order_details = OrderDetails.objects.get(id=int(api_valuess["order_details_id"]))
except:
order_details = None
if order_details:
order_details.admin_status = "Approved"
order_details.save()
print(i)
print(order_details)
else:
return Response({
"success": False,
"message": "Something went wrong.Order could not be approved!!"
})
try:
product_specification = ProductSpecification.objects.get(id=int(api_valuess["specification_id"]))
except:
product_specification = None
if product_specification:
product_specification.admin_status = "Confirmed"
product_specification.save()
quantity_flag = quantity_flag + 1
print(i)
print(product_specification)
print(quantity_flag)
else:
return Response({
"success": False,
"message": "Something went wrong.Specification of this product could not be approved!!"
})
#return JsonResponse({"success":True,"message":"All the quantities have been added with their prices and the order item has been approved and the specification has been approved"})
main_flag = True
print(i)
print("main flag true")
else:
main_flag = False
# return Response({
# "success": False,
# "message": "Something went wrong !!"
# })
else:
main_flag = False
if main_flag == False:
return JsonResponse({"success":False,"message":"The product point,discount or delivery info could not be added"})
elif api_values[i]["product_status"] == "Cancelled":
print("wbefuefbewqufgbewqufbeqwufvbweufwebfuwegbfuwefbweufb")
print(i)
print("product status cancelled")
#Fetch the ordel details item and change its status
order_dets_id = int(api_values[i]["order_details_id"])
print("order_dets_id")
print(order_dets_id)
try:
order_dets = OrderDetails.objects.get(id=order_dets_id)
except:
order_dets = None
print(i)
print("order_dets")
print(order_dets)
if order_dets:
order_dets.product_status = "Cancelled"
order_dets.admin_status = "Cancelled"
order_dets.save()
main_flag = True
else:
main_flag = False
print(main_flag)
if main_flag == False:
return JsonResponse({"success":False,"message":"Something went wrong while cancelling an order"})
else:
main_flag = False
if main_flag == True:
print("quantity_flag")
print("main_flag tryue hoise")
print(order_id)
print(quantity_flag)
if quantity_flag > 0:
#Approve the order
try:
order = Order.objects.get(id=order_id)
except:
order = None
if order:
order.admin_status = "Confirmed"
order.save()
return JsonResponse({"success":True,"message":"All the data has been inserted and the order has been approved"})
else:
return JsonResponse({"success":False,"message":"The order could not be approved"})
else:
try:
order = Order.objects.get(id=order_id)
except:
order = None
if order:
order.admin_status = "Cancelled"
order.save()
return JsonResponse({"success":False,"message":"None of the products were approved and the invoice is cancelled"})
else:
return JsonResponse({"success":False,"message":"The order could not be approved"})
#approve the order
# try:
# order = Order.objects.g
except:
return JsonResponse({"success":False,"message":"Something went wrong in the main method"})
def admin_approve_add_merchant_specification(value):
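    # Attach discount, point and delivery-area records to an already created
    # merchant specification; returns True on success, otherwise deletes
    # whatever was saved and returns False.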
    current_date = date.today()
discount_type = value["discount_type"]
discount_amount = value["discount_amount"]
discount_start_date = value["discount_start_date"]
discount_end_date = value["discount_end_date"]
point_amount = value["point_amount"]
point_start_date = value["point_start_date"]
point_end_date = value["point_end_date"]
specification_id = value['specification_id']
product_id = value['product_id']
if discount_type == "none" or discount_amount == '' or discount_start_date == '' or discount_end_date == '':
discount_flag = False
else:
discount_flag = True
if ((point_amount == "") or (point_start_date == "") or (point_end_date == "")):
point_flag = False
# print("False")
else:
point_flag = True
product_discount = {
'product_id': product_id,
'amount': discount_amount,
'discount_type': discount_type,
'start_date': value["discount_start_date"],
'end_date': value["discount_end_date"],
'specification_id': specification_id
}
product_point = {
'product_id': product_id,
'point': value["point_amount"],
'start_date': value["point_start_date"],
'end_date': value["point_end_date"],
'specification_id': specification_id
}
delivery_id = 0
discount_id = 0
point_id = 0
price_id = 0
flag = 0
spec = {}
price = {}
discount = {}
point = {}
delivery = {}
code={}
try:
if discount_flag == False:
discount = {}
else:
product_dis = ProductDiscountSerializer(data=product_discount)
if product_dis.is_valid():
product_dis.save()
discount.update(product_dis.data)
discount_id = discount["id"]
else:
discount_id = 0
flag = flag+1
if point_flag == False:
point = {}
else:
product_point_value = ProductPointSerializer(data=product_point)
if product_point_value.is_valid():
product_point_value.save()
point.update(product_point_value.data)
point_id = point["id"]
else:
point_id = 0
flag = flag+1
data_val = {
'option' : value["option"],
'spec': specification_id,
'arrayForDelivery': value["arrayForDelivery"]
# 'arrayForDelivery': [
# {
# 'selectedDistrict': 'Dhaka',
# 'selectedThana':[
# 'Banani',
# 'Gulshan',
# 'Rampura',
# 'Dhanmondi'
# ]
# },
# {
# 'selectedDistrict': 'Khulna',
# 'selectedThana':[
# 'Hizla',
# 'Muladi',
# 'Borguna',
# 'Betagi'
# ]
# }
# ]
}
value = add_delivery_data(data_val)
if flag > 0:
try:
poi = ProductPoint.objects.get(id=point_id)
except:
poi = None
if poi:
poi.delete()
try:
dis = discount_product.objects.get(id=discount_id)
except:
dis = None
if dis:
dis.delete()
return False
else:
return True
except:
try:
poi = ProductPoint.objects.get(id=point_id)
except:
poi = None
if poi:
poi.delete()
try:
dis = discount_product.objects.get(id=discount_id)
except:
dis = None
if dis:
dis.delete()
return False
@api_view(["GET",])
def individual_seller_spec(request,specification_id):
try:
product_spec = ProductSpecification.objects.get(id=specification_id)
except:
product_spec = None
if product_spec:
prod_serializer = SellerSpecificationSerializer(product_spec,many=False)
p_data = prod_serializer.data
else:
p_data = {}
return JsonResponse({"success":True,"message":"The info is shown","data":p_data})
@api_view(["GET",])
def get_delivery_info(request,specification_id):
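    # Rebuild the district/thana selection stored for a specification from
    # delivery_area_id / delivery_location_ids; -1 is used as the "not set"
    # sentinel.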
main_data = []
try:
delivery_places = product_delivery_area.objects.filter(specification_id = specification_id)
except:
delivery_places = None
print(delivery_places)
if delivery_places:
area_ids = list(delivery_places.values_list('delivery_area_id',flat=True))
if -1 in area_ids:
area_ids.remove(-1)
print(area_ids)
if len(area_ids) < 1:
main_data = []
# print(area_ids)
else:
for i in range(len(area_ids)):
try:
product_areas = product_delivery_area.objects.get(specification_id = specification_id,delivery_area_id=area_ids[i])
except:
product_areas = None
print(product_areas)
if product_areas:
location_ids = product_areas.delivery_location_ids
else:
location_ids = []
try:
area_name = DeliveryArea.objects.get(id=area_ids[i])
except:
area_name = None
if area_name:
selected_district = area_name.Area_name
else:
selected_district = ""
selected_thanas = []
print("location_ids")
print(location_ids)
for j in range(len(location_ids)):
try:
deli_loc = DeliveryLocation.objects.get(id = int(location_ids[j]))
except:
deli_loc = None
print(deli_loc)
print("deli_loc")
if deli_loc:
loc_name = deli_loc.location_name
else:
loc_name = ""
selected_thanas.append(loc_name)
all_thanas = []
try:
all_locs = DeliveryLocation.objects.filter(area_id = area_ids[i])
except:
all_locs = None
if all_locs:
all_locs_ids = list(all_locs.values_list('id',flat=True))
all_locs_names = list(all_locs.values_list('location_name',flat=True))
else:
all_locs_ids = []
all_locs_names =[]
for k in range(len(all_locs_names)):
loc_dic = {"location_name":all_locs_names[k]}
all_thanas.append(loc_dic)
main_dic = {"selectedDistrict":selected_district,"selectedThana":selected_thanas,"thanas":all_thanas}
main_data.append(main_dic)
else:
main_data = []
return JsonResponse({"data": main_data})
@api_view(["POST",])
def verify_pos(request):
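    # Linear scan over all terminals comparing API keys; a single
    # Terminal.objects.filter(API_key=API_key).first() query would do the
    # same lookup more directly.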
term_data = {}
API_key = request.data.get("API_key")
try:
term = Terminal.objects.all()
except:
term = None
if term:
term_ids = list(term.values_list('id',flat=True))
for i in range(len(term_ids)):
try:
specific_term = Terminal.objects.get(id=term_ids[i])
except:
specific_term = None
if specific_term:
if specific_term.API_key == API_key:
term_serializer = TerminalSerializer(specific_term,many=False)
term_data = term_serializer.data
break
else:
pass
else:
pass
else:
pass
if term_data == {}:
return JsonResponse({"success":False,"message":"The API key provided does not exist","data":term_data})
else:
return JsonResponse({"success":True,"message":"Installation successful","data":term_data})
@api_view(["GET",])
def warehouse_shop_info(request):
warehouses = []
shops = []
try:
warehouse = Warehouse.objects.all()
except:
warehouse = None
try:
shop = Shop.objects.all()
except:
shop = None
if warehouse:
warehouse_serializer = WSerializer(warehouse,many=True)
warehouse_data = warehouse_serializer.data
if shop:
shop_serializer = SSerializer(shop,many=True)
shop_data = shop_serializer.data
return JsonResponse({"success":True,"message":"The data is shown below","warehouses":warehouse_data,"shops":shop_data})
@api_view(["POST",])
def create_terminal(request):
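    # A terminal is attached to either a warehouse or a shop; the unused
    # side is stored as -1.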
# warehouses = []
# shops = []
terminal_name = request.data.get("terminal_name")
warehouse_id = request.data.get("warehouse_id")
shop_id = request.data.get("shop_id")
admin_id = request.data.get("admin_id")
if warehouse_id == "":
if shop_id:
s_id = shop_id
w_id = -1
elif shop_id == "":
if warehouse_id:
w_id = warehouse_id
s_id = -1
main_data = {"terminal_name":terminal_name,"warehouse_id":w_id,"shop_id":s_id,"admin_id":admin_id}
terminal = Terminal.objects.create(terminal_name = terminal_name,warehouse_id = w_id,shop_id = s_id, admin_id = admin_id)
terminal.save()
term_id = terminal.id
print("terminalid")
print(term_id)
try:
terminal = Terminal.objects.get(id=term_id)
except:
terminal = None
if terminal:
terminal_serializer = TerminalSerializer(terminal,many=False)
term_data = terminal_serializer.data
return JsonResponse({"success":True,"message":"Terminal is created","data":term_data})
else:
return JsonResponse({"success":False,"message":"Terminal is not created"})
# term_serializer = TerminalSerializer(data=main_data)
# if term_serializer.is_valid():
# term_serializer.save()
# term_id = term_serializer.data["id"]
# try:
# terminal = Terminal.objects.get(id=int(term_id))
# except:
# terminal = None
# if terminal:
# terminal.save()
# else:
# return JsonResponse({"success":False,"message":"Terminal is not created"})
@api_view(["GET",])
def terminal_list(request):
try:
terminals = Terminal.objects.all()
except:
terminals = None
if terminals:
term_serializer = TerminalSerializer(terminals,many=True)
term_data = term_serializer.data
return JsonResponse({"success":True,"message":"Data is shown","data":term_data})
else:
return JsonResponse({"success":False,"message":"Data doesnt exist"})
#This is for the admin panel.Admin will use this to create a user
@api_view (["POST",])
def create_pos_user(request,terminal_id):
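    # Create a POS user and link it to the terminal. Note that the plain-text
    # password is also stored in the 'pwd' field, which the rest of the code
    # appears to rely on.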
email = request.data.get('email')
password = request.data.get('password')
role = request.data.get('role')
pwd = make_password(password)
username = request.data.get('username')
phone_number = request.data.get('phone_number')
if username is None:
username = ""
if phone_number is None:
phone_number = ""
#Create an user
new_user = User.objects.create(email=email,password=pwd,pwd=password,role=role,is_staff=False,is_verified=True,is_active=True,username=username,phone_number=phone_number)
new_user.save()
user_id = new_user.id
email = new_user.email
print(new_user)
data = {'email':email,'password':pwd,'pwd':password,'role':role,'is_staff':False,'is_verified':True,'is_active':True,'username':username,'phone_number':phone_number}
new_serializer = UserSerializerz(new_user,data=data)
if new_serializer.is_valid():
new_serializer.save()
# balance_values = {'user_id':user_id}
# create_user_balance(balance_values)
profile_values ={'user_id':user_id,'email':email}
create_user_profile(profile_values)
data = new_serializer.data
#Insertion in the TerminalUsers table
terminal_user = TerminalUsers.objects.create(terminal_id=terminal_id,user_id=user_id,is_active=True)
terminal_user.save()
# try:
# current_user = User.objects.get(id=user_id)
# except:
# current_user = None
# if current_user:
# new_serializer = UserSerializerz(new_user,many=False)
# data = new_serializer.data
# else:
# data = {}
return Response(
{
'success': True,
'message': 'User has been created',
'data' : data,
# 'encrypted_password': data["password"],
'password': password
})
else:
print(new_serializer.errors)
return Response(
{
'success': False,
'message': 'Could not create user',
})
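# Plain Django views (no @api_view decorator) that toggle the is_active flag
# on a terminal / terminal user respectively.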
def make_terminal_active_inactive(request,terminal_id):
try:
terminal = Terminal.objects.get(id=terminal_id)
except:
terminal = None
print(terminal)
if terminal:
if terminal.is_active == True:
print("is true")
terminal.is_active = False
terminal.save()
return JsonResponse({"success":True,"message":"The active status has been changed","is_active":False})
elif terminal.is_active == False:
terminal.is_active = True
terminal.save()
return JsonResponse({"success":True,"message":"The active status has been changed","is_active":True})
else:
return JsonResponse({"success":False,"message":"The terminal does not exist"})
def make_user_active_inactive(request,user_id,terminal_id):
try:
terminal = TerminalUsers.objects.get(terminal_id=terminal_id,user_id=user_id)
except:
terminal = None
print(terminal)
if terminal:
if terminal.is_active == True:
print("is true")
terminal.is_active = False
terminal.save()
return JsonResponse({"success":True,"message":"The active status has been changed","is_active":False})
elif terminal.is_active == False:
terminal.is_active = True
terminal.save()
return JsonResponse({"success":True,"message":"The active status has been changed","is_active":True})
else:
return JsonResponse({"success":False,"message":"The user does not exist"})
@api_view (["POST",])
def insert_specification_price(request,specification_id):
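    # Store the MRP plus Single/Minimum/Maximum price tiers, then push the
    # specification to the mother site; if the push fails the tiers are
    # deleted and the 'shared' flag is reset.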
# data = {
# "MRP": 25.00,
# "data_array" : [{
# "status": "Single",
# "quantity": 1,
# "purchase_price": 300.0,
# "selling_price": 350.0,
# },
# {
# "status": "Minimum",
# "quantity": 10,
# "purchase_price": 300.0,
# "selling_price": 350.0,
# },
# {
# "status": "Maximum",
# "quantity": 100,
# "purchase_price": 300.0,
# "selling_price": 350.0,
# }]
# }
data = request.data
print(data)
try:
prod_specz = ProductSpecification.objects.get(id=specification_id)
except:
prod_specz = None
if prod_specz:
shared_status = prod_specz.shared
if shared_status == False:
MRP = data["MRP"]
data_info = data["data_array"]
ids = []
for i in range(len(data_info)):
spec_price = SpecificationPrice.objects.create(specification_id = specification_id, mrp = MRP, status = data_info[i]["status"],quantity = data_info[i]["quantity"],purchase_price = data_info[i]["purchase_price"],selling_price = data_info[i]["selling_price"] )
spec_price.save()
spec_id = spec_price.id
ids.append(spec_id)
try:
specs = SpecificationPrice.objects.filter(id__in=ids,is_active = True)
except:
specs = None
if specs:
specs_serializer = MaxMinSerializer(specs,many=True)
specs_data = specs_serializer.data
#Change the specification status
# try:
# specific_spec = ProductSpecification.objects.get(id=specification_id)
# except:
# specific_spec = None
# if specific_spec:
# specific_spec.
try:
prod_spec = ProductSpecification.objects.get(id=specification_id)
except:
prod_spec = None
if prod_spec:
try:
prod = Product.objects.get(id = prod_spec.product_id)
except:
prod = None
if prod:
prod.shared = True
prod.save()
prod_spec.shared = True
prod_spec.save()
spec_serializer = MotherSpecificationSerializer(prod_spec,many=False)
spec_data = spec_serializer.data
else:
spec_data = {}
print(spec_data)
# specc_data = json.loads(spec_data)
spec_dataz = json.dumps(spec_data)
url = site_path + "productdetails/insert_child_product_info/"
headers = {'Content-Type': 'application/json',}
dataz = requests.post(url = url, headers=headers,data = spec_dataz)
# print(dataz)
dataz = dataz.json()
# print(dataz)
if dataz["success"] == True:
return JsonResponse({"success":True,"message":"Data has been inserted","data": specs_data,"product_info":spec_data})
else:
#Delete the max min values
prod_spec.shared = False
prod_spec.save()
try:
max_del = SpecificationPrice.objects.filter(id__in = ids)
except:
max_del = None
if max_del:
max_del.delete()
return JsonResponse({"success":True,"message":"Data could not be inserted in mothersite","data": specs_data,"product_info":spec_data})
else:
return JsonResponse({"success":False,"message":"Data could not be inserted"})
else:
return JsonResponse({"success":False,"message":"This product has already been shared before"})
else:
return JsonResponse({"success":False,"message":"This product does not exist"})
@api_view(["GET", ])
def mothersite_approval_response(request,specification_id):
try:
specs = ProductSpecification.objects.get(id=specification_id)
except:
specs = None
if specs:
# try:
# prod = Product.objects.get(specs.product_id)
# except:
# prod = None
# if prod:
# prod.product_admin_status = "Cancelled"
specs.mother_status = "Confirmed"
specs.save()
return JsonResponse({"success":True,"message":"Mother Site has approved this product"})
else:
return JsonResponse({"success":False,"message":"The product does not exist"})
@api_view(["GET", ])
def mothersite_cancelled_response(request,specification_id):
try:
specs = ProductSpecification.objects.get(id=specification_id)
except:
specs = None
if specs:
# try:
# prod = Product.objects.get(specs.product_id)
# except:
# prod = None
# if prod:
# prod.product_admin_status = "Cancelled"
specs.mother_status = "Cancelled"
specs.save()
return JsonResponse({"success":True,"message":"Mother Site has cancelled this product"})
else:
return JsonResponse({"success":False,"message":"The product does not exist"})
@api_view(["GET", ])
def all_shared_motherproducts(request):
try:
specs = ProductSpecification.objects.filter(is_own=False)
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def all_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared = True)
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def approved_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared = True,mother_status="Confirmed")
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def pending_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared= True,mother_status="Processing")
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def cancelled_shared_products(request):
try:
specs = ProductSpecification.objects.filter(shared= True,mother_status="Cancelled")
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=True)
return JsonResponse({"success":True,"message":"Specifications are displayed","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
@api_view(["GET", ])
def all_mothersite_products(request):
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print(site_id)
print(type(site_id))
url = site_path + "productdetails/all_mothersite_products/" +str(site_id)+ "/"
mother_response = requests.get(url = url)
mother_data = mother_response.json()
if mother_data["success"] == True:
all_products = mother_data["data"]
return JsonResponse({"success":True,"message":"Mother Site products are shown","data":all_products})
else:
return JsonResponse({"success":False,"message":"There are no mother site products to show"})
@api_view(["GET", ])
def individual_specs(request,specification_id):
try:
specs = ProductSpecification.objects.get(id = specification_id)
except:
specs = None
if specs:
specs_serializer = MotherSpecificationSerializer(specs,many=False)
return JsonResponse({"success":True,"message":"Individual data is shown","data":specs_serializer.data})
else:
return JsonResponse({"success": False,"message":"There is no data to show"})
# @api_view(["POST", ])
# def bring_product(request,mother_specification_id):
# # data = [{
# # "status": "Single",
# # "quantity": 1,
# # "purchase_price": 300.0,
# # "selling_price": 0.0,
# # "MRP": 1125.00,
# # "increament_type": "Percentage",
# # "increament_value": 10.0,
# # },
# # {
# # "status": "Minimum",
# # "quantity": 10,
# # "purchase_price": 280.0,
# # "selling_price": 0.0,
# # "MRP": 1125.00,
# # "increament_type": "Percentage",
# # "increament_value": 10.0,
# # },
# # {
# # "status": "Maximum",
# # "quantity": 100,
# # "purchase_price": 30000.0,
# # "selling_price": 0.0,
# # "MRP": 111125.00,
# # "increament_type": "Percentage",
# # "increament_value": 10.0,
# # }]
# data = request.data
# MRP_flag = 1
# purchase_price = float(data[0]["purchase_price"])
# selling_price = float(data[0]["MRP"])
# main_product_id = 0
# main_specification_id = 0
# for k in range(len(data)):
# if data[k]["MRP"] >= data[k]["purchase_price"]:
# MRP_flag = 1
# else:
# MRP_flag = 0
# break
# if MRP_flag == 1:
# try:
# company= CompanyInfo.objects.all()
# except:
# company = None
# if company:
# company = company[0]
# site_id = company.site_identification
# else:
# site_id = ""
# print("site_id")
# print(site_id)
# try:
# prod_specz = ProductSpecification.objects.all()
# except:
# prod_specz = None
# if prod_specz:
# all_mother_specification_ids = list(prod_specz.values_list('mother_specification_id',flat=True))
# else:
# all_mother_specification_ids = []
# print(all_mother_specification_ids)
# if mother_specification_id not in all_mother_specification_ids:
# specification_id = mother_specification_id
# url = site_path + "productdetails/individual_specs/" +str(specification_id)+ "/"
# mother_response = requests.get(url = url)
# mother_data = mother_response.json()
# if mother_data["success"] == True:
# data = mother_data["data"]
# print("main data")
# print(data)
# mother_product_id = data["product_data"]["id"]
# try:
# product = Product.objects.get(mother_product_id=mother_product_id)
# except:
# product = None
# if product:
# # return JsonResponse({"success":False})
# print("product already stored")
# product_id = product.id
# main_product_id = product_id
# spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
# "warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
# "mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
# spec_info = insert_specification_data(spec_data)
# print("spec_info")
# print(spec_info)
# specification_id = spec_info["data"]["id"]
# main_specification_id = specification_id
# data["product_code"]["specification_id"] = specification_id
# data["product_code"]["product_id"] = product_id
# code_info = insert_code_data(data["product_code"])
# print("code_info")
# print(code_info)
# data["delivery_info"]["specification_id"] = specification_id
# delivery_info = insert_delivery_data(data["delivery_info"])
# print("dekivery_info")
# print(delivery_info)
# for i in range(len(data["max_min"])):
# data["max_min"][i]["specification_id"] = specification_id
# data["max_min"][i]["mother_specification_id"] = data["id"]
# data["max_min"][i]["is_own"] = False
# max_min_info = insert_max_min_info(data["max_min"])
# print("max")
# print(max_min_info)
# if spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
# print("shob true hoise")
# main_flag = True
# mother_spec_id = data["id"]
# print(mother_spec_id)
# print(site_id)
# url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
# mother_responses = requests.get(url = url)
# print()
# mother_datas = mother_responses.json()
# if mother_datas["success"] == True:
# #Insert the mrp
# for i in range(len(data)):
# specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=data[i]["status"],quantity=int(data[i]["quantity"]),purchase_price=float(data[i]["purchase_price"]),selling_price=float(data[i]["selling_price"]),mrp=float(data[i]["MRP"]),is_active=True,is_own=False)
# specification_price.save()
# #Insert the price
# spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
# spec_price.save()
# return JsonResponse({"success": True,"message":"Data have been inserted.Product info and product image info has been added before.","spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info})
# else:
# return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
# else:
# return JsonResponse({"success":False,"message":"Data could not be inserted"})
# else:
# prod_data = insert_product_data(
# data["product_data"], data["category_data"], data["site_id"])
# product_id = prod_data["data"]["id"]
# main_product_id = main_product_id
# product_name = prod_data["data"]["title"]
# print(product_name)
# print(product_id)
# image_data = insert_product_image( data["product_images"],product_id,product_name)
# spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
# "warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
# "mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
# spec_info = insert_specification_data(spec_data)
# specification_id = spec_info["data"]["id"]
# main_specification_id = specification_id
# data["product_code"]["specification_id"] = specification_id
# data["product_code"]["product_id"] = product_id
# code_info = insert_code_data(data["product_code"])
# data["delivery_info"]["specification_id"] = specification_id
# delivery_info = insert_delivery_data(data["delivery_info"])
# for i in range(len(data["max_min"])):
# data["max_min"][i]["specification_id"] = specification_id
# data["max_min"][i]["mother_specification_id"] = data["id"]
# data["max_min"][i]["is_own"] = False
# max_min_info = insert_max_min_info(data["max_min"])
# if prod_data["flag"] == True and spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
# main_flag = True
# mother_spec_id = data["id"]
# url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
# mother_responses = requests.get(url = url)
# mother_datas = mother_responses.json()
# if mother_datas["success"] == True:
# #Insert the mrp
# for i in range(len(data)):
# specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=data[i]["status"],quantity=int(data[i]["quantity"]),purchase_price=float(data[i]["purchase_price"]),selling_price=float(data[i]["selling_price"]),mrp=float(data[i]["MRP"]),is_active=True,is_own=False)
# specification_price.save()
# #Insert the price
# spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
# spec_price.save()
# return JsonResponse({"success": True,"message":"Data have been inserted","product": prod_data,"spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info,"product_image":image_data})
# else:
# return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
# else:
# return JsonResponse({"success":False,"message":"Data could not be inserted"})
# else:
# return JsonResponse({"success":False,"message":"Data could not be retrieved from mother site"})
# else:
# return JsonResponse({"success":False,"message":"This specfication had already been shared before"})
# else:
# return JsonResponse({"success":False,"message":'The MRP provided is less than the purchase price'})
@api_view(["POST", ])
def bring_product(request,mother_specification_id):
# data = [{
# "status": "Single",
# "quantity": 1,
# "purchase_price": 300.0,
# "selling_price": 0.0,
# "MRP": 1125.00,
# "increament_type": "Percentage",
# "increament_value": 10.0,
# },
# {
# "status": "Minimum",
# "quantity": 10,
# "purchase_price": 280.0,
# "selling_price": 0.0,
# "MRP": 1125.00,
# "increament_type": "Percentage",
# "increament_value": 10.0,
# },
# {
# "status": "Maximum",
# "quantity": 100,
# "purchase_price": 30000.0,
# "selling_price": 0.0,
# "MRP": 111125.00,
# "increament_type": "Percentage",
# "increament_value": 10.0,
# }]
data = request.data
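    # Keep a handle on the incoming price-tier list: 'data' is reassigned below to the
    # mother-site payload, while 'dataX' is still needed to create the SpecificationPrice rows.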
dataX = data
print(data)
MRP_flag = 1
purchase_price = float(data[0]["purchase_price"])
selling_price = float(data[0]["MRP"])
main_product_id = 0
main_specification_id = 0
for k in range(len(data)):
if data[k]["MRP"] >= data[k]["purchase_price"]:
MRP_flag = 1
else:
MRP_flag = 0
break
if MRP_flag == 1:
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print("site_id")
print(site_id)
try:
prod_specz = ProductSpecification.objects.all()
except:
prod_specz = None
if prod_specz:
all_mother_specification_ids = list(prod_specz.values_list('mother_specification_id',flat=True))
else:
all_mother_specification_ids = []
print(all_mother_specification_ids)
if mother_specification_id not in all_mother_specification_ids:
specification_id = mother_specification_id
url = site_path + "productdetails/individual_specs/" +str(specification_id)+ "/"
mother_response = requests.get(url = url)
mother_data = mother_response.json()
if mother_data["success"] == True:
data = mother_data["data"]
print("main data")
print(data)
mother_product_id = data["product_data"]["id"]
try:
product = Product.objects.get(mother_product_id=mother_product_id)
except:
product = None
if product:
# return JsonResponse({"success":False})
print("product already stored")
product_id = product.id
main_product_id = product_id
spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
"warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
"mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
spec_info = insert_specification_data(spec_data)
print("spec_info")
print(spec_info)
specification_id = spec_info["data"]["id"]
main_specification_id = specification_id
data["product_code"]["specification_id"] = specification_id
data["product_code"]["product_id"] = product_id
code_info = insert_code_data(data["product_code"])
print("code_info")
print(code_info)
data["delivery_info"]["specification_id"] = specification_id
delivery_info = insert_delivery_data(data["delivery_info"])
print("dekivery_info")
print(delivery_info)
for i in range(len(data["max_min"])):
data["max_min"][i]["specification_id"] = specification_id
data["max_min"][i]["mother_specification_id"] = data["id"]
data["max_min"][i]["is_own"] = False
max_min_info = insert_max_min_info(data["max_min"])
print("max")
print(max_min_info)
if spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
print("shob true hoise")
main_flag = True
mother_spec_id = data["id"]
print(mother_spec_id)
print(site_id)
url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
mother_responses = requests.get(url = url)
print()
mother_datas = mother_responses.json()
if mother_datas["success"] == True:
#Insert the mrp
print("databefore")
print(data)
for i in range(len(dataX)):
specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=dataX[i]["status"],quantity=int(dataX[i]["quantity"]),purchase_price=float(dataX[i]["purchase_price"]),selling_price=float(dataX[i]["selling_price"]),mrp=float(dataX[i]["MRP"]),is_active=True,is_own=False)
specification_price.save()
#Insert the price
spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
spec_price.save()
return JsonResponse({"success": True,"message":"Data have been inserted.Product info and product image info has been added before.","spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info})
else:
return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
else:
return JsonResponse({"success":False,"message":"Data could not be inserted"})
else:
prod_data = insert_product_data(
data["product_data"], data["category_data"], data["site_id"])
product_id = prod_data["data"]["id"]
                    main_product_id = product_id
product_name = prod_data["data"]["title"]
print(product_name)
print(product_id)
image_data = insert_product_image( data["product_images"],product_id,product_name)
spec_data = {"product_id": product_id, "size": data["size"], "unit": data["unit"], "weight": data["weight"], "color": data["color"], "warranty": data["warranty"],
"warranty_unit": data["warranty_unit"], "vat": float(data["vat"]), "weight_unit": data["weight_unit"], "manufacture_date": data["manufacture_date"], "expire": data["expire"],"is_own":False,
"mother_status":"Confirmed","admin_status":"Confirmed","mother_specification_id":data["id"]}
spec_info = insert_specification_data(spec_data)
specification_id = spec_info["data"]["id"]
main_specification_id = specification_id
data["product_code"]["specification_id"] = specification_id
data["product_code"]["product_id"] = product_id
code_info = insert_code_data(data["product_code"])
data["delivery_info"]["specification_id"] = specification_id
delivery_info = insert_delivery_data(data["delivery_info"])
for i in range(len(data["max_min"])):
data["max_min"][i]["specification_id"] = specification_id
data["max_min"][i]["mother_specification_id"] = data["id"]
data["max_min"][i]["is_own"] = False
max_min_info = insert_max_min_info(data["max_min"])
if prod_data["flag"] == True and spec_info["flag"] == True and code_info["flag"] == True and delivery_info["flag"] == True and max_min_info["flag"] == True:
main_flag = True
mother_spec_id = data["id"]
url = site_path + "productdetails/track_sharing/"+str(mother_spec_id)+"/"+str(site_id)+ "/"
mother_responses = requests.get(url = url)
mother_datas = mother_responses.json()
if mother_datas["success"] == True:
#Insert the mrp
# print("data")
# print(data)
for i in range(len(dataX)):
# print(specification_id)
# print(data[i]["status"])
# print(data[i]["quantity"])
# print(data[i]["purchase_price"])
# print(data[i]["selling_price"])
# print(data[i]["MRP"])
specification_price = SpecificationPrice.objects.create(specification_id=specification_id,status=dataX[i]["status"],quantity=int(dataX[i]["quantity"]),purchase_price=float(dataX[i]["purchase_price"]),selling_price=float(dataX[i]["selling_price"]),mrp=float(dataX[i]["MRP"]),is_active=True,is_own=False)
specification_price.save()
#Insert the price
spec_price = ProductPrice.objects.create(specification_id=main_specification_id,product_id=main_product_id,price=selling_price,purchase_price=purchase_price)
spec_price.save()
return JsonResponse({"success": True,"message":"Data have been inserted","product": prod_data,"spec":spec_info,"code":code_info,"delivery_info":delivery_info,"max_min_info":max_min_info,"product_image":image_data})
else:
return JsonResponse({"success":False,"message": "Data was inserted nut the tracking info was not stored"})
else:
return JsonResponse({"success":False,"message":"Data could not be inserted"})
else:
return JsonResponse({"success":False,"message":"Data could not be retrieved from mother site"})
else:
return JsonResponse({"success":False,"message":"This specfication had already been shared before"})
else:
return JsonResponse({"success":False,"message":'The MRP provided is less than the purchase price'})
def insert_product_image(product_images,product_id,product_name):
image_data = []
for i in range(len(product_images)):
prod_image = ProductImage.objects.create(product_id = product_id ,content = product_images[i]["content"], mother_url = product_images[i]["image_url"], is_own=False)
prod_image.save()
prod_image_id = prod_image.id
#image_url = 'http://whatever.com/image.jpg'
image_url = prod_image.mother_url
r = requests.get(image_url)
# img_temp = NamedTemporaryFile()
# img_temp.write(urlopen(image_url).read())
# img_temp.flush()
img_temp = NamedTemporaryFile()
img_temp.write(r.content)
img_temp.flush()
image_name = product_name + str(prod_image_id)+".jpg"
prod_image.product_image.save(image_name, File(img_temp), save=True)
# prod_image.product_image.save("image_%s" % prod_image.pk, ImageFile(img_temp))
# response = requests.get(image_url)
# img = Image.open(BytesIO(response.content))
# prod_image.product_image = img
# prod_image.save()
prod_image_serializer = MotherProductImageCreationSerializer(prod_image,many=False)
im_data = prod_image_serializer.data
image_data.append(im_data)
return ({"flag":True,"data":image_data})
def insert_product_data(product_data, category_data, site_data):
# print(product_data)
# print(category_data)
category_ids = category1_data_upload(category_data)
cat_data = category_ids.json()
category_id = cat_data["category"]
sub_category_id = cat_data["sub_category"]
sub_sub_category_id = cat_data["sub_sub_category"]
is_own = False
product_admin_status = "Confirmed"
title = product_data["title"]
brand = product_data["brand"]
description = product_data["description"]
# print("description")
# print(description)
key_features = product_data["key_features"]
# print("key_features")
# print(key_features)
is_group = product_data["is_group"]
origin = product_data["origin"]
shipping_country = product_data["shipping_country"]
# mother_status = product_data["mother_status"]
mother_product_id = int(product_data["id"])
# unique_id = "X"+str(child_product_id) + "Y" + str(child_site_id)
# data_values = {"category_id": category_id, "sub_category_id": sub_category_id, "sub_sub_category_id": sub_sub_category_id,
# "is_own": False, "product_admin_status": product_admin_status, "title": title, "brand": brand, "description": description, "key_features": key_features,
# "origin": origin, "shipping_country": shipping_country, "is_group": is_group, "mother_product_id": mother_product_id,
# "mother_status": "Confirmed","product_status" :"Published"}
    product = Product.objects.create(category_id = category_id,sub_category_id=sub_category_id,sub_sub_category_id=sub_sub_category_id,is_own=False,product_admin_status=product_admin_status,title=title,brand=brand,description=description,key_features=key_features,origin=origin,shipping_country=shipping_country,is_group=is_group,mother_product_id=mother_product_id,mother_status="Confirmed",product_status="Published")
product.save()
p_id = product.id
try:
prod = Product.objects.get(id=p_id)
except:
prod = None
if prod:
product_serializer = ChildProductCreationSerializer(prod,many=False)
return ({"flag":True,"data":product_serializer.data})
else:
return ({"flag":False,"data":[]})
# product_serializer = ChildProductCreationSerializer(data=data_values)
# if product_serializer.is_valid():
# print("product save hochche")
# product_serializer.save()
# product_id = int(product_serializer.data["id"])
# globals()['global_product_id'] = product_id
# try:
# product = Product.objects.get(id=product_id)
# except:
# product = None
# if product:
# unique_id = "X"+str(child_product_id) + "Y" + str(child_site_id) + "Z" + str(product_id)
# product.unique_id = unique_id
# product.save()
# product_serializer = ChildProductCreationSerializer(product,many=False)
# return ({"flag":True,"data":product_serializer.data})
# else:
# print(product_serializer.errors)
# return ({"flag":False,"data":[]})
def insert_specification_data(spec_data):
specification_serializer = MotherSpecificationCreationSerializer(data=spec_data)
if specification_serializer.is_valid():
specification_serializer.save()
print("specification save hochche")
# product_id = int(product_serializer.data["id"])
#globals()['global_product_id'] = product_id
#print(product_serializer.data)
return ({"flag":True,"data": specification_serializer.data})
else:
print(specification_serializer.errors)
return ({"flag":False,"data":[]})
def insert_code_data(code_data):
specification_serializer = MotherCodeCreationSerializer(data=code_data)
if specification_serializer.is_valid():
specification_serializer.save()
# product_id = int(product_serializer.data["id"])
#globals()['global_product_id'] = product_id
#print(product_serializer.data)
return ({"flag":True,"data": specification_serializer.data})
else:
print(specification_serializer.errors)
return ({"flag":False,"data":[]})
def insert_delivery_data(delivery_data):
specification_serializer = MotherDeliveryInfoCreationSerializer(data= delivery_data)
if specification_serializer.is_valid():
specification_serializer.save()
# product_id = int(product_serializer.data["id"])
#globals()['global_product_id'] = product_id
#print(product_serializer.data)
return ({"flag":True,"data": specification_serializer.data})
else:
print(specification_serializer.errors)
return ({"flag":False,"data":[]})
def insert_max_min_info(max_min_data):
max_min = []
for i in range(len(max_min_data)):
if max_min_data[i]["status"] == "single" or max_min_data[i]["status"] == "Single" :
max_min_data[i]["status"] = "Single"
if max_min_data[i]["status"] == "min" or max_min_data[i]["status"] == "Single" :
max_min_data[i]["status"] = "Single"
specification_serializer = ChildSpecificationPriceSerializer(data= max_min_data[i])
if specification_serializer.is_valid():
specification_serializer.save()
max_min.append(specification_serializer.data)
else:
return ({"flag":False,"data":[]})
return ({"flag":True,"data":max_min})
@api_view(["GET",])
def unsharedSpecification(request):
try:
spec = ProductSpecification.objects.filter(shared=False,is_own=True)
except:
spec = None
print("this is spec data" , spec)
if spec:
spec_serializer = OwnSpecificationSerializer(spec,many=True)
spec_data = spec_serializer.data
return JsonResponse({"success":True,"message":"Data is shown","data":spec_data})
else:
return JsonResponse({"success":False,"message":"Data doesnt exist"})
@api_view(["GET"])
def own_quantity_check(request,specification_id):
try:
prod = ProductSpecification.objects.get(id=specification_id)
except:
prod = None
if prod:
if prod.is_own == True:
if prod.shared == True:
quantity = prod.quantity
return JsonResponse({"success":True,"message":"The current quantity is shown","quantity":quantity})
else:
return JsonResponse({"success":False,"message":"This product has not been shared so cannot return the quantity"})
else:
return JsonResponse({"success":False,"message":"This product is not my own product so cannot return the quantity"})
else:
return JsonResponse({"success":False,"message":"This product does not exist"})
@api_view(["GET"])
def not_own_quantity_check(request,specification_id):
try:
prod = ProductSpecification.objects.get(id=specification_id)
except:
prod = None
if prod:
if prod.is_own == False:
mother_specification_id = prod.mother_specification_id
url = site_path + "productdetails/quantity_checker/" +str(mother_specification_id)+ "/"
mother_response = requests.get(url = url)
mother_response = mother_response.json()
if mother_response["success"] == True:
quantity = mother_response["quantity"]
return JsonResponse({"success":True,"message":"The current quantity is shown","quantity":quantity})
else:
return JsonResponse({"success":False,"message":"The quantity could not be retireved."})
else:
return JsonResponse({"success":False,"message":"This product is your own product"})
else:
return JsonResponse({"success":False,"message":"This product does not exist"})
# def get_max_min_values()
@api_view(["POST"])
def update_max_min_values(request,specification_id):
# data = [
# {
# "id": 53,
# "status": "Single",
# "quantity": 1,
# "purchase_price": 300.0,
# "selling_price": 370.0,
# "mrp": 25.0,
# "is_active": True,
# "specification_id": 10,
# "is_own": True,
# "mother_specification_id": -1,
# "increament_type": "Percentage",
# "increament_value": 0.0
# },
# {
# "id": 54,
# "status": "Minimum",
# "quantity": 10,
# "purchase_price": 300.0,
# "selling_price": 370.0,
# "mrp": 25.0,
# "is_active": True,
# "specification_id": 10,
# "is_own": True,
# "mother_specification_id": -1,
# "increament_type": "Percentage",
# "increament_value": 0.0
# },
# {
# "id": 55,
# "status": "Maximum",
# "quantity": 100,
# "purchase_price": 300.0,
# "selling_price": 370.0,
# "mrp": 25.0,
# "is_active": True,
# "specification_id": 10,
# "is_own": True,
# "mother_specification_id": -1,
# "increament_type": "Percentage",
# "increament_value": 0.0
# }
# ]
    # Demo payload, kept for reference only (note that the update loop below expects a
    # plain LIST of max/min dicts like the sample at the top, not this dict):
    # data = {
    #     'arrayForDelivery': [
    #         {'selectedDistrict': 'Dhaka', 'selectedThana': ['Banani', 'Gulshan', 'Rampura', 'Dhanmondi']},
    #         {'selectedDistrict': 'Barishal', 'selectedThana': ['Hizla', 'Muladi', 'Borguna', 'Betagi']}
    #     ],
    #     'max_min': [
    #         {"id": 53, "status": "Single", "quantity": 1, "purchase_price": 300.0, "selling_price": 370.0,
    #          "mrp": 25.0, "is_active": True, "specification_id": 10, "is_own": True,
    #          "mother_specification_id": -1, "increament_type": "Percentage", "increament_value": 0.0},
    #         {"id": 54, "status": "Minimum", "quantity": 10, "purchase_price": 300.0, "selling_price": 370.0,
    #          "mrp": 25.0, "is_active": True, "specification_id": 10, "is_own": True,
    #          "mother_specification_id": -1, "increament_type": "Percentage", "increament_value": 0.0},
    #         {"id": 55, "status": "Maximum", "quantity": 100, "purchase_price": 300.0, "selling_price": 370.0,
    #          "mrp": 25.0, "is_active": True, "specification_id": 10, "is_own": True,
    #          "mother_specification_id": -1, "increament_type": "Percentage", "increament_value": 0.0}
    #     ]
    # }
    data = request.data
flag = 0
spec_data = []
restore_data = []
for i in range(len(data)):
max_min_id = data[i]["id"]
max_min_data = data[i]
try:
spec_price = SpecificationPrice.objects.get(id=max_min_id)
except:
spec_price = None
if spec_price:
restore_serializer = MaxMinSerializer1(spec_price,many=False)
restore_dataz = restore_serializer.data
restore_data.append(restore_dataz)
spec_price_serializer = MaxMinSerializer1(spec_price,data=max_min_data)
if spec_price_serializer.is_valid():
spec_price_serializer.save()
flag = flag + 1
else:
return JsonResponse({"success":False,"message":"This max min value does not exist"})
try:
spec_pricez = SpecificationPrice.objects.filter(specification_id=specification_id)
except:
spec_pricez = None
if spec_pricez:
spec_pricez_serializer = MaxMinSerializer(spec_pricez,many=True)
spec_data = spec_pricez_serializer.data
else:
spec_data = []
if flag == 3:
try:
company= CompanyInfo.objects.all()
except:
company = None
if company:
company = company[0]
site_id = company.site_identification
else:
site_id = ""
print(specification_id)
print(site_id)
spec_dataz = json.dumps(spec_data)
url = site_path + "productdetails/update_own_specification_prices/" + str(specification_id) + "/" + str(site_id) + "/"
headers = {'Content-Type': 'application/json',}
print(spec_data)
dataz = requests.post(url = url, headers=headers,data = spec_dataz)
data_response = dataz.json()
if data_response["success"] == True:
print("true hochche")
return JsonResponse({"success":True,"message":"The values have been updated","data":spec_data})
else:
#restore the values
print("true hochche na")
data = restore_data
for i in range(len(data)):
max_min_id = data[i]["id"]
max_min_data = data[i]
try:
spec_price = SpecificationPrice.objects.get(id=max_min_id)
except:
spec_price = None
if spec_price:
# restore_serializer = MaxMinSerializer(spec_price,many=False)
# restore_dataz = restore_serializer.data
# restore_data.append(restore_dataz)
spec_price_serializer = MaxMinSerializer1(spec_price,data=max_min_data)
if spec_price_serializer.is_valid():
spec_price_serializer.save()
flag = flag + 1
else:
return JsonResponse({"success":False,"message":"This max min value does not exist"})
return JsonResponse({"success":False,"message":'Mother site did not respond so data was not inserted'})
else:
#restore the data
data = restore_data
for i in range(len(data)):
max_min_id = data[i]["id"]
max_min_data = data[i]
try:
spec_price = SpecificationPrice.objects.get(id=max_min_id)
except:
spec_price = None
if spec_price:
# restore_serializer = MaxMinSerializer1(spec_price,many=False)
# restore_dataz = restore_serializer.data
# restore_data.append(restore_dataz)
spec_price_serializer = MaxMinSerializer1(spec_price,data=max_min_data)
if spec_price_serializer.is_valid():
spec_price_serializer.save()
flag = flag + 1
else:
return JsonResponse({"success":False,"message":"This max min value does not exist"})
return JsonResponse({"success":False,"message":"The values could not be updated"})
def check_price(request,specification_id):
#Fetching the max min values
try:
product_spec = ProductSpecification.objects.get(id = specification_id)
except:
product_spec = None
print(product_spec)
if product_spec:
mother_specification_id = product_spec.mother_specification_id
if product_spec.is_own == True:
return JsonResponse({"success":False, "message":"This is your own product you dont need to check the price."})
else:
#Fetch the max min values from the mother site
url = site_path + "productdetails/show_max_min_values/" +str(mother_specification_id)+ "/"
mother_response = requests.get(url = url)
mother_response = mother_response.json()
if mother_response["success"] == True:
if mother_response["on_hold"] == True:
product_spec.on_hold = True
return JsonResponse({"success":True,"message":"The product is kept on hold and cannot be sold"})
else:
counter_flag = 0
mother_data = mother_response["data"]
print(mother_data)
print(specification_id)
#Fetch the Specification Price of this product
try:
specification_prices = SpecificationPrice.objects.filter(specification_id = specification_id).order_by('id')
except:
specification_prices = None
print(specification_prices)
if specification_prices:
spec_serializer = MaxMinSerializer1(specification_prices,many=True)
specs_data = spec_serializer.data
specs_data = json.loads(json.dumps(specs_data))
#Making the comparisons
print(specs_data)
print(type(mother_data[0]["quantity"]))
print(type(specs_data[0]["quantity"]))
if mother_data[0]["status"] == "Single" and specs_data[0]["status"] == "Single":
if mother_data[0]["quantity"] == specs_data[0]["quantity"] and mother_data[0]["selling_price"] == specs_data[0]["purchase_price"]:
counter_flag = counter_flag +1
else:
pass
else:
pass
if mother_data[1]["status"] == "Minimum" and specs_data[1]["status"] == "Minimum":
if mother_data[1]["quantity"] == specs_data[1]["quantity"] and mother_data[1]["selling_price"] == specs_data[1]["purchase_price"]:
counter_flag = counter_flag +1
else:
pass
else:
pass
if mother_data[2]["status"] == "Maximum" and specs_data[2]["status"] == "Maximum":
if mother_data[2]["quantity"] == specs_data[2]["quantity"] and mother_data[2]["selling_price"] == specs_data[2]["purchase_price"]:
counter_flag = counter_flag +1
else:
pass
else:
pass
print("counter_flag")
print(counter_flag)
if counter_flag == 3:
return JsonResponse({"success":True,"message":"The product can be sold"})
else:
return JsonResponse({"success":False,"message":"This product's price has been changed and has to be on hold"})
else:
return JsonResponse({"success":False,"message":"The specification prices do not exist"})
else:
return JsonResponse({"success":False,"message":"This product does not exist"})
@api_view(["GET",])
def approve_purchase_order(request, order_id):
try:
order = Order.objects.get(id = order_id)
except:
order = None
all_item_data = []
if order:
order.admin_status = "Confirmed"
order.save()
warehouse_id = find_warehouse_id()
try:
order_details = OrderDetails.objects.filter(order_id = order_id)
except:
order_details = None
if order_details:
order_details_ids = list(order_details.values_list('id', flat=True))
else:
order_details_ids = []
for i in range(len(order_details_ids)):
try:
specific_item = OrderDetails.objects.get(id = order_details_ids[i])
except:
specific_item = None
if specific_item:
specific_item.admin_status = specific_item.mother_admin_status
specific_item.save()
purchase_price = specific_item.unit_price
specification_id = specific_item.specification_id
selling_price = fetch_selling_price(specification_id)
warehouse = [{"warehouse_id":warehouse_id,"quantity":specific_item.total_quantity}]
shop = []
item_data = {"product_id":specific_item.product_id,"specification_id":specific_item.specification_id,"purchase_price":purchase_price,"selling_price":selling_price,"warehouse":warehouse,"shop":shop}
insert_quantity = insert_purchase_product_quantity(item_data,order_id)
print("INSERT QUANTITY")
print(insert_quantity)
# all_item_data.append(item_data)
else:
pass
# main_data = {"order_id":order_id,"info":all_item_data }
# print(main_data)
# change_statuses = change_orderdetails_statuses(main_data)
return JsonResponse({"success":True,"message":"This invoice hass been approved"})
else:
return JsonResponse({"success":False,"message":"This order does not exist"})
def insert_purchase_product_quantity(api_values,order_id):
# demo values
# api_values = {
# 'product_id':35,
# 'specification_id':34,
# 'purchase_price': 100,
# 'selling_price': 120,
# 'warehouse': [
# {
# 'warehouse_id': 1,
# 'quantity': 200
# },
# {
# 'warehouse_id': 2,
# 'quantity': 200
# }
# ],
# 'shop': [
# {
# 'shop_id': 3,
# 'quantity': 200
# },
# {
# 'shop_id': 2,
# 'quantity': 200
# },
# {
# 'shop_id': 1,
# 'quantity': 200
# }
# ]
# }
#api_values = request.data
current_date = date.today()
#if request.method == 'POST':
# Insert the purchase price and selling price for that object:
# try:
price_data = {"product_id": api_values["product_id"], "specification_id": api_values["specification_id"],
"price": api_values["selling_price"], "purchase_price": api_values["purchase_price"]}
# Inserting the price
product_price_serializer = ProductPriceSerializer(data=price_data)
print("fjeswdifhfhds")
if product_price_serializer.is_valid():
product_price_serializer.save()
else:
print(product_price_serializer.errors)
# except:
# return JsonResponse({"success": False, "message": "The price could not be inserted"})
try:
# Fetching the product price
prod_price = ProductPrice.objects.filter(
specification_id=api_values["specification_id"]).last()
except:
prod_price = None
if prod_price:
purchase_price = prod_price.purchase_price
selling_price = prod_price.price
else:
return {"success": False, "message": "Price does not exist for this product"}
try:
# checking is there any warehouse data exists or not
if len(api_values['warehouse']) > 0:
for wareh in api_values['warehouse']:
try:
# getting the previous data if there is any in the similar name. If exists update the new value. if does not create new records.
wareh_query = WarehouseInfo.objects.filter(
warehouse_id=wareh['warehouse_id'], specification_id=api_values['specification_id']).last()
print("quertresult")
print(wareh_query)
if wareh_query:
# quantity_val = wareh_query[0].quantity
# new_quantity = quantity_val + wareh['quantity']
# wareh_query.update(quantity=new_quantity)
# wareh_query.save()
print("existing warehouse")
print(type(wareh['quantity']))
print(wareh_query.quantity)
warehouse_quantity = wareh_query.quantity
print(warehouse_quantity)
new_quantity = warehouse_quantity + int(wareh['quantity'])
print(new_quantity)
wareh_query.quantity = new_quantity
print(wareh_query.quantity)
wareh_query.save()
print(wareh_query.quantity)
try:
product_spec = ProductSpecification.objects.get(
id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("else ey dhuktese")
wareh_data = WarehouseInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], warehouse_id=wareh['warehouse_id'],
quantity=int(wareh['quantity']))
wareh_data.save()
try:
product_spec = ProductSpecification.objects.get(
id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
# updating the inventory report credit records for each ware house quantity. It will help to keep the records in future.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=wareh['quantity'], warehouse_id=wareh['warehouse_id'])
# report_data.save()
# Check to see if there are any inventory_reports
# try:
# report = inventory_report.objects.filter(product_id=api_values['product_id'],specification_id=api_values['specification_id'],warehouse_id=wareh['warehouse_id'],date=current_date).last()
# except:
# report = None
# if report:
# #Update the existing report
# report.credit += int(wareh['quantity'])
# report.save()
new_report = inventory_report.objects.create(product_id=api_values['product_id'], specification_id=api_values['specification_id'], warehouse_id=wareh['warehouse_id'], credit=int(
wareh['quantity']), date=current_date, purchase_price=purchase_price, selling_price=selling_price)
new_report.save()
# subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id=api_values['specification_id'], warehouse_id=wareh['warehouse_id'], debit_quantity=int(
# wareh['quantity']), date=current_date)
# subtract_item.save()
except:
pass
if len(api_values['shop']) > 0:
for shops in api_values['shop']:
try:
# getting the existing shop values if is there any.
print(shops['shop_id'])
shop_query = ShopInfo.objects.filter(
shop_id=shops['shop_id'], specification_id=api_values['specification_id']).last()
print(shop_query)
if shop_query:
print("shop ase")
quantity_val = shop_query.quantity
new_quantity = quantity_val + int(shops['quantity'])
# shop_query.update(quantity=new_quantity)
shop_query.quantity = new_quantity
shop_query.save()
try:
product_spec = ProductSpecification.objects.get(
id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
else:
print("shop nai")
shop_data = ShopInfo.objects.create(specification_id=api_values['specification_id'], product_id=api_values['product_id'], shop_id=shops['shop_id'],
quantity=int(shops['quantity']))
shop_data.save()
# Updating the report table after being inserted the quantity corresponding to credit coloumn for each shop.
# report_data = inventory_report(
# product_id=api_values['product_id'], credit=shops['quantity'], shop_id=shops['shop_id'])
# report_data.save()
try:
product_spec = ProductSpecification.objects.get(
id=api_values['specification_id'])
except:
product_spec = None
if product_spec:
product_spec.save()
new_report = inventory_report.objects.create(product_id=api_values['product_id'], specification_id=api_values['specification_id'], shop_id=shops['shop_id'], credit=int(
shops['quantity']), date=current_date, purchase_price=purchase_price, selling_price=selling_price)
new_report.save()
# subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id=api_values['specification_id'], shop_id = shops['shop_id'], debit_quantity=int(
# shops['quantity']), date=current_date)
# subtract_item.save()
except:
pass
#Insert subtract method here
subtraction_result = subtract_purchase_product_quantity(api_values,order_id)
print("SUBTRACTION_RESULT")
print(subtraction_result)
return {
"success": True,
"message": "Data has been added successfully"
}
except:
return {
"success": False,
"message": "Something went wrong !!"
}
# def subtract_purchase_warehouse_quantity()
# def approve_purchase_orders(request, order_id):
def subtract_purchase_product_quantity(api_values,order_id):
print(api_values)
# api_values = {
# 'product_id':35,
# 'specification_id':34,
# 'purchase_price': 100,
# 'selling_price': 120,
# 'warehouse': [
# {
# 'warehouse_id': 1,
# 'quantity': 200
# },
# {
# 'warehouse_id': 2,
# 'quantity': 200
# }
# ],
# 'shop': [
# {
# 'shop_id': 3,
# 'quantity': 200
# },
# {
# 'shop_id': 2,
# 'quantity': 200
# },
# {
# 'shop_id': 1,
# 'quantity': 200
# }
# ]
# }
#api_values = request.data
current_date = date.today()
warehouse_data = api_values["warehouse"]
shop_data = api_values["shop"]
specification_id = api_values["specification_id"]
product_id = api_values["product_id"]
print(shop_data)
print(warehouse_data)
try:
if len(warehouse_data) > 0:
for i in range(len(warehouse_data)):
try:
warehouse_info = WarehouseInfo.objects.filter(specification_id=specification_id,warehouse_id=warehouse_data[i]["warehouse_id"]).last()
except:
warehouse_info = None
if warehouse_info:
if warehouse_info.quantity >= int(warehouse_data[i]["quantity"]):
#subtract the quantity
warehouse_info.quantity -= int(warehouse_data[i]["quantity"])
warehouse_info.save()
new_report = inventory_report.objects.create (product_id=product_id, specification_id= specification_id, warehouse_id= warehouse_data[i]["warehouse_id"], debit= int(warehouse_data[i]["quantity"]), date=current_date)
new_report.save()
subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id = specification_id, warehouse_id = warehouse_data[i]["warehouse_id"], debit_quantity= int(warehouse_data[i]["quantity"]),date=current_date)
subtract_item.save()
else:
return False
else:
return False
if len(shop_data) > 0:
print(len(shop_data))
for k in range(len(shop_data)):
i = k
try:
shop_info = ShopInfo.objects.filter(specification_id=specification_id,shop_id=shop_data[i]["shop_id"]).last()
except:
shop_info = None
if shop_info:
print("SHOP INFO")
print(shop_info)
if shop_info.quantity >= int(shop_data[i]["quantity"]):
print("quantity subtract hochchce")
#subtract the quantity
shop_info.quantity -= int(shop_data[i]["quantity"])
shop_info.save()
print("shop_info save hochche")
# new_report = inventory_report.objects.create (product_id=product_id, specification_id= specification_id, shop_id= shop_data[i]["warehouse_id"], credit= int(shop_data[i]["quantity"]))
# new_report.save()
# print("new_report save")
# subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id = specification_id, shop_id = shop_data[i]["warehouse_id"], debit_quantity= int(shop_data[i]["quantity"]),date=current_date)
# subtract_item.save()
# print("subtract_item save")
new_report = inventory_report.objects.create (product_id=product_id, specification_id= specification_id, shop_id= shop_data[i]["shop_id"], debit= int(shop_data[i]["quantity"]), date=current_date)
new_report.save()
subtract_item = subtraction_track.objects.create(order_id = order_id, specification_id = specification_id, shop_id = shop_data[i]["shop_id"], debit_quantity= int(shop_data[i]["quantity"]),date=current_date)
subtract_item.save()
else:
print("ERRRORRRRRR")
return False
else:
print("SECONDDDDDDDDDDDDDDDDDD")
return False
return True
except:
return False
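# subtract_purchase_product_quantity returns True only when every warehouse/shop row
# has enough stock to debit; any shortage or missing row aborts with False, and the
# surrounding bare except collapses unexpected errors into the same False result.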
@api_view(["GET", "POST"])
def get_all_quantity_list_and_price(request, specification_id):
if request.method == 'GET':
purchase_price = 0
selling_price = 0
try:
spec_price = SpecificationPrice.objects.filter(specification_id = specification_id,status="Single").last()
except:
spec_price = None
if spec_price:
purchase_price = spec_price.purchase_price
selling_price = spec_price.mrp
try:
warehouse_values = []
shop_values = []
warehouse_ids = []
shop_ids = []
warehouse_query = WarehouseInfo.objects.filter(
specification_id=specification_id)
print(warehouse_query)
wh_name = Warehouse.objects.all()
print(wh_name)
for wq in warehouse_query:
print(wq.warehouse_id)
warehouse_data = Warehouse.objects.get(id=wq.warehouse_id)
wh_data = {"warehouse_id": warehouse_data.id, "previous_quantity": wq.quantity,
"warehouse_name": warehouse_data.warehouse_name}
print(wh_data)
warehouse_values.append(wh_data)
warehouse_ids.append(wq.warehouse_id)
print(warehouse_values)
for warehouse in wh_name:
if warehouse.id not in warehouse_ids:
wh_data = {"warehouse_id": warehouse.id, "previous_quantity": 0,
"warehouse_name": warehouse.warehouse_name}
warehouse_values.append(wh_data)
print(warehouse_values)
shopinfo_query = ShopInfo.objects.filter(
specification_id=specification_id)
all_shops = Shop.objects.all()
print(shopinfo_query)
print(all_shops)
for shop in shopinfo_query:
shop_data = Shop.objects.get(id=shop.shop_id)
datas = {"shop_id": shop_data.id, "previous_quantity": shop.quantity,
"shop_name": shop_data.shop_name}
shop_values.append(datas)
shop_ids.append(shop.shop_id)
for shops in all_shops:
if shops.id not in shop_ids:
datas = {"shop_id": shops.id, "previous_quantity": 0,
"shop_name": shops.shop_name}
shop_values.append(datas)
return JsonResponse({
"success": True,
"message": "Data has been retrieved successfully",
"data": {
"warehouse": warehouse_values,
"shop": shop_values ,
"purchase_price": purchase_price,
"selling_price" : selling_price
}
})
except:
return JsonResponse({
"success": False,
"message": "Something went wrong"
})
#Find warehouse id
def find_warehouse_id():
try:
warehouse = Warehouse.objects.filter(warehouse_name="Mothersite",warehouse_location="Mothersite").last()
except:
warehouse = None
if warehouse:
warehouse_id = warehouse.id
else:
warehouse_id = -1
return warehouse_id
def fetch_selling_price(specification_id):
try:
p_price = ProductPrice.objects.get(specification_id=specification_id)
except:
p_price = None
if p_price:
selling_price = p_price.price
else:
selling_price = 0
return selling_price
| [
"[email protected]"
] | |
25ef6c97fd596d1d2354d836019a500f2ecc8459 | a1508558da875f6ea3c55840b44df74dfd8e5f54 | /trade_free/portfolio/simple_portfolio.py | 94769841a1f4946dcd4018c81dafdf1cb40da449 | [
"Apache-2.0"
] | permissive | NewLanded/TradeFree | 49cea6a17b5f3b4661d1c98a81e031123f02b3e6 | f65122f5ed01cc1272fd2f03121ff3805a1967cb | refs/heads/master | 2020-07-19T21:13:01.976587 | 2020-01-09T14:02:29 | 2020-01-09T14:02:29 | 206,515,265 | 2 | 2 | Apache-2.0 | 2020-01-09T14:02:31 | 2019-09-05T08:36:58 | Python | UTF-8 | Python | false | false | 6,922 | py | import datetime
import math
import numpy as np
from utils.constant_util import BUY, SELL
from .abs_portfolio import AbsPortfolio
from ..event import OrderEvent
class SimplePortfolio(AbsPortfolio):
"""
    Test portfolio: sends a fixed trade quantity to the brokerage object, with no risk management or position sizing.
"""
def __init__(self, start_date, event_queue, bars, initial_capital):
"""
Parameters:
            bars - The DataHandler object with current market data.
            events - The Event Queue object.
            start_date - The start date (bar) of the portfolio.
            initial_capital - The starting capital in USD.
"""
self.bars = bars
self.event_queue = event_queue
self.symbol_list = self.bars.symbol_list
self.start_date_previous_day = start_date - datetime.timedelta(days=1)
self.initial_capital = initial_capital
self.all_positions = self._construct_all_positions()
self.current_positions = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
self.all_holdings = self._construct_all_holdings()
self.current_holdings = self._construct_current_holdings()
self.bs_data = []
def _construct_all_positions(self):
"""
        Construct all_positions using start_date, to determine when the time index begins.
"""
all_positions = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
all_positions['datetime'] = self.start_date_previous_day
return [all_positions]
def _construct_all_holdings(self):
"""
        Construct all_holdings using start_date, to determine when the time index begins.
"""
all_holdings = dict((k, v) for k, v in [(s, 0.0) for s in self.symbol_list])
all_holdings['datetime'] = self.start_date_previous_day
all_holdings['cash'] = self.initial_capital # 现金
all_holdings['commission'] = 0.0 # 累计佣金
all_holdings['total'] = self.initial_capital # 包括现金和任何未平仓头寸在内的总账户资产, 空头头寸被视为负值
return [all_holdings]
def _construct_current_holdings(self):
"""
        Similar to _construct_all_holdings, but applies only to the current moment.
"""
current_holdings = dict((k, v) for k, v in [(s, 0.0) for s in self.symbol_list])
current_holdings['cash'] = self.initial_capital
current_holdings['commission'] = 0.0
current_holdings['total'] = self.initial_capital
return current_holdings
def update_signal(self, event):
"""
        Receive a SignalEvent and generate an OrderEvent.
"""
# if event.type == 'SIGNAL':
order_event = self.generate_naive_order(event)
self.event_queue.put(order_event)
def generate_naive_order(self, signal):
"""
        Naively generate an OrderEvent, without considering risk management, etc.
Parameters:
signal - The SignalEvent signal information.
"""
order = None
symbol = signal.symbol
event_id = signal.event_id
direction = signal.direction
order_type = signal.order_type
mkt_quantity = signal.quantity
mkt_price = signal.price
single_date = signal.single_date
if mkt_quantity:
order = OrderEvent(event_id, symbol, order_type, mkt_quantity, mkt_price, direction, single_date)
return order
def update_fill(self, event):
"""
        Update positions and holdings from a FillEvent.
"""
# if event.type == 'FILL':
self.update_positions_from_fill(event)
self.update_holdings_from_fill(event)
self.update_bs_data_from_fill(event)
def update_positions_from_fill(self, fill):
"""
        Use the FillEvent object to update the positions.
Parameters:
fill - The FillEvent object to update the positions with.
"""
# Check whether the fill is a buy or sell
fill_dir = 0
if fill.direction == BUY:
fill_dir = 1
if fill.direction == SELL:
fill_dir = -1
# Update positions list with new quantities
self.current_positions[fill.symbol] += fill_dir * fill.quantity
def update_bs_data_from_fill(self, fill):
"""记录buy sell 数据"""
close_point = self.bars.get_latest_bars(fill.symbol)[0][5]
bs_data = {"bs_date": fill.fill_date, "direction": fill.direction, "quantity": fill.quantity, "price": close_point, "symbol": fill.symbol}
self.bs_data.append(bs_data)
def update_holdings_from_fill(self, fill):
"""
        Use the FillEvent object to update the holdings.
Parameters:
fill - The FillEvent object to update the holdings with.
"""
# Check whether the fill is a buy or sell
fill_dir = 0
if fill.direction == BUY:
fill_dir = 1
if fill.direction == SELL:
fill_dir = -1
# Update holdings list with new quantities
fill_cost = self.bars.get_latest_bars(fill.symbol)[0][5] # Close price
cost = fill_dir * fill_cost * fill.quantity
self.current_holdings[fill.symbol] += cost
self.current_holdings['commission'] += fill.commission
self.current_holdings['cash'] -= (cost + fill.commission)
self.current_holdings['total'] -= (cost + fill.commission)
def update_timeindex(self):
"""
Appends a new record to positions, using a MarketEvent from the queue.
"""
bars = {}
for symbol in self.symbol_list:
bars[symbol] = self.bars.get_latest_bars(symbol, N=1)
# Update positions
data_position = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
data_position['datetime'] = bars[self.symbol_list[0]][0][1]
for symbol in self.symbol_list:
data_position[symbol] = self.current_positions[symbol]
# Append the current positions
self.all_positions.append(data_position)
# Update holdings
data_holding = dict((k, v) for k, v in [(s, 0) for s in self.symbol_list])
data_holding['datetime'] = bars[self.symbol_list[0]][0][1]
data_holding['cash'] = self.current_holdings['cash']
data_holding['commission'] = self.current_holdings['commission']
data_holding['total'] = self.current_holdings['cash']
for symbol in self.symbol_list:
# Approximation to the real value
market_value = self.current_positions[symbol] * bars[symbol][0][5] # quantity * close price as a valuation estimate
data_holding[symbol] = market_value
data_holding[symbol + "_close"] = bars[symbol][0][5]
data_holding['total'] = data_holding['total'] + market_value if not math.isnan(market_value) else data_holding['total'] # skip NaN market values (e.g. missing bars)
self.all_holdings.append(data_holding)
| [
"[email protected]"
] | |
45149d5320d27687d7ff31975d14835cd619efa7 | 5d77833445b1ef95b5ca7b9a886f98cb38a16286 | /code/9-12 TacotronDecoderwrapper.py | 28ddda9aacb18edb2af96dfac848ac5941305610 | [] | no_license | wangbin0227/TensorFlow_Engineering_Implementation | bbafa4933c3244b65f0d3a2625fd58a9f8726c34 | cb787e359da9ac5a08d00cd2458fecb4cb5a3a31 | refs/heads/master | 2023-03-18T10:58:58.916184 | 2021-03-16T15:03:49 | 2021-03-16T15:03:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,362 | py | """
@author: 代码医生工作室 (Code Doctor Studio)
@WeChat official account: xiangyuejiqiren (more quality articles and learning materials inside)
@source: companion code for the book <深度学习之TensorFlow工程化项目实战> (700+ pages)
@companion code support: bbs.aianaconda.com (questions answered)
"""
import tensorflow as tf
from tensorflow.python.framework import ops, tensor_shape
from tensorflow.python.ops import array_ops, check_ops, rnn_cell_impl, tensor_array_ops
from tensorflow.python.util import nest
from tensorflow.contrib.seq2seq.python.ops import attention_wrapper
attention = __import__("9-11 attention")
LocationSensitiveAttention = attention.LocationSensitiveAttention
class TacotronDecoderwrapper(tf.nn.rnn_cell.RNNCell):
# initialization
def __init__(self,encoder_outputs, is_training, rnn_cell, num_mels , outputs_per_step):
super(TacotronDecoderwrapper, self).__init__()
self._training = is_training
self._attention_mechanism = LocationSensitiveAttention(256, encoder_outputs)# [N, T_in, attention_depth=256]
self._cell = rnn_cell
self._frame_projection = tf.keras.layers.Dense(units=num_mels * outputs_per_step, name='projection_frame')# [N, T_out/r, M*r]
# # [N, T_out/r, r]
self._stop_projection = tf.keras.layers.Dense(units=outputs_per_step,name='projection_stop')
self._attention_layer_size = self._attention_mechanism.values.get_shape()[-1].value
self._output_size = num_mels * outputs_per_step # define the output size
def _batch_size_checks(self, batch_size, error_message):
return [check_ops.assert_equal(batch_size, self._attention_mechanism.batch_size,
message=error_message)]
@property
def output_size(self):
return self._output_size
#@property
def state_size(self): # return the state size (adapted from AttentionWrapper)
return tf.contrib.seq2seq.AttentionWrapperState(
cell_state=self._cell._cell.state_size,
time=tensor_shape.TensorShape([]),
attention=self._attention_layer_size,
alignments=self._attention_mechanism.alignments_size,
alignment_history=(),
attention_state = ())
def zero_state(self, batch_size, dtype): # return a zero state (adapted from AttentionWrapper)
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
cell_state = self._cell.zero_state(batch_size, dtype)
error_message = (
"When calling zero_state of TacotronDecoderCell %s: " % self._base_name +
"Non-matching batch sizes between the memory "
"(encoder output) and the requested batch size.")
with ops.control_dependencies(
self._batch_size_checks(batch_size, error_message)):
cell_state = nest.map_structure(
lambda s: array_ops.identity(s, name="checked_cell_state"),
cell_state)
return tf.contrib.seq2seq.AttentionWrapperState(
cell_state=cell_state,
time=array_ops.zeros([], dtype=tf.int32),
attention=rnn_cell_impl._zero_state_tensors(self._attention_layer_size, batch_size, dtype),
alignments=self._attention_mechanism.initial_alignments(batch_size, dtype),
alignment_history=tensor_array_ops.TensorArray(dtype=dtype, size=0,dynamic_size=True),
attention_state = tensor_array_ops.TensorArray(dtype=dtype, size=0,dynamic_size=True)
)
def __call__(self, inputs, state): # inputs: the true output y at this step; state: the decoder state from the previous step; together they predict the next step
drop_rate = 0.5 if self._training else 0.0 # set the dropout rate
# preprocess the inputs (prenet)
with tf.variable_scope('decoder_prenet'):# [N, T_in, prenet_depths[-1]=128]
for i, size in enumerate([256, 128]):
dense = tf.keras.layers.Dense(units=size, activation=tf.nn.relu, name='dense_%d' % (i+1))(inputs)
inputs = tf.keras.layers.Dropout( rate=drop_rate, name='dropout_%d' % (i+1))(dense, training=self._training)
# append the attention features
rnn_input = tf.concat([inputs, state.attention], axis=-1)
# apply a dense transform, then feed into the decoder RNN
rnn_output, next_cell_state = self._cell(tf.keras.layers.Dense(256)(rnn_input), state.cell_state)
# compute the attention for this step
context_vector, alignments, cumulated_alignments = attention_wrapper._compute_attention(self._attention_mechanism,
rnn_output, state.alignments, None) # state.alignments is the previous cumulative attention
# save the alignment history (consistent with the original AttentionWrapper)
alignment_history = state.alignment_history.write(state.time, alignments)
# return this step's wrapper state
next_state = tf.contrib.seq2seq.AttentionWrapperState( time=state.time + 1,
cell_state=next_cell_state,attention=context_vector,
alignments=cumulated_alignments, alignment_history=alignment_history,
attention_state = state.attention_state)
# compute this step's result: concat the decoder output with the attention context as the final input
projections_input = tf.concat([rnn_output, context_vector], axis=-1)
# two dense layers predict the next output frames and the <stop_token> flag, respectively
cell_outputs = self._frame_projection(projections_input) # mel features for the next outputs_per_step frames
stop_tokens = self._stop_projection(projections_input)
if self._training==False:
stop_tokens = tf.nn.sigmoid(stop_tokens)
return (cell_outputs, stop_tokens), next_state
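# A minimal usage sketch (the shapes and the inner RNN cell below are illustrative
# assumptions, not taken from this file):
#   cell = TacotronDecoderwrapper(encoder_outputs, is_training=True,
#                                 rnn_cell=decoder_rnn_cell,  # e.g. a wrapped multi-layer RNN cell
#                                 num_mels=80, outputs_per_step=2)
#   state = cell.zero_state(batch_size=32, dtype=tf.float32)
#   (frames, stop_tokens), state = cell(previous_frame, state)  # one decoding step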
| [
"[email protected]"
] | |
ba257c7a32b2ec4aa2b22fc7c7b92e305f9f957d | 5b3caf64b77161748d0929d244798a8fb914d9c5 | /Python Excel Examples/GeneralApiDemo/convertInRequest.py | b196e1d1ec4e23d1a9d95f987f3a2b8969ea75af | [] | no_license | EiceblueCloud/Spire.Cloud.Excel | 0d56864991eaf8d44c38f21af70db614b1d804b7 | d9845d5cefd15a3ab408b2c9f80828a4767e2b82 | refs/heads/master | 2021-07-20T23:44:39.068568 | 2021-07-15T03:04:49 | 2021-07-15T03:04:49 | 230,225,396 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 556 | py | import spirecloudexcel
from spirecloudexcel.configuration import Configuration as ExcelConfiguration
from spirecloudexcel.api.general_api import GeneralApi
appId = "your id"
appKey = "your key"
baseUrl = "https://api.e-iceblue.cn"
configuration = ExcelConfiguration(appId, appKey,baseUrl)
api = spirecloudexcel.api.general_api.GeneralApi(configuration)
format = "Pdf" #Supported formats: Xlsx/Xls/Xlsb/Ods/Pdf/Xps/Ps/Pcl
file = "D:/inputFile/charts.xlsx"
password = ""
result = api.convert_in_request(format,file=file, password=password)
| [
"[email protected]"
] | |
d634e31486f5044b31ab168805511a33ded6ef6a | eacfc1c0b2acd991ec2cc7021664d8e79c9e58f6 | /ccpnmr2.4/python/ccp/format/marvin/generalIO.py | 21409931818e74a5fd154a4652c790008a1b86d2 | [] | no_license | edbrooksbank/ccpnmr2.4 | cfecb0896dcf8978d796e6327f7e05a3f233a921 | f279ca9bb2d972b1ce075dad5fcc16e6f4a9496c | refs/heads/master | 2021-06-30T22:29:44.043951 | 2019-03-20T15:01:09 | 2019-03-20T15:01:09 | 176,757,815 | 0 | 1 | null | 2020-07-24T14:40:26 | 2019-03-20T14:59:23 | HTML | UTF-8 | Python | false | false | 2,522 | py |
"""
======================COPYRIGHT/LICENSE START==========================
generalIO.py: General I/O information for marvin files
Copyright (C) 2007 Wim Vranken (European Bioinformatics Institute)
=======================================================================
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
A copy of this license can be found in ../../../../license/LGPL.license
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
======================COPYRIGHT/LICENSE END============================
for further information, please contact :
- CCPN website (http://www.ccpn.ac.uk/)
- PDBe website (http://www.ebi.ac.uk/pdbe/)
- contact Wim Vranken ([email protected])
=======================================================================
If you are using this software for academic purposes, we suggest
quoting the following references:
===========================REFERENCE START=============================
R. Fogh, J. Ionides, E. Ulrich, W. Boucher, W. Vranken, J.P. Linge, M.
Habeck, W. Rieping, T.N. Bhat, J. Westbrook, K. Henrick, G. Gilliland,
H. Berman, J. Thornton, M. Nilges, J. Markley and E. Laue (2002). The
CCPN project: An interim report on a data model for the NMR community
(Progress report). Nature Struct. Biol. 9, 416-418.
Wim F. Vranken, Wayne Boucher, Tim J. Stevens, Rasmus
H. Fogh, Anne Pajon, Miguel Llinas, Eldon L. Ulrich, John L. Markley, John
Ionides and Ernest D. Laue (2005). The CCPN Data Model for NMR Spectroscopy:
Development of a Software Pipeline. Proteins 59, 687 - 696.
===========================REFERENCE END===============================
"""
from ccp.format.general.formatIO import FormatFile
from ccp.format.general.Constants import defaultMolCode
#####################
# Class definitions #
#####################
class MarvinGenericFile(FormatFile):
def setGeneric(self):
self.format = 'marvin'
self.defaultMolCode = defaultMolCode
| [
"[email protected]"
] | |
8429023f1b3c30a87447a7c557bf8a050b626b9e | f1cb02057956e12c352a8df4ad935d56cb2426d5 | /LeetCode/245. Shortest Word Distance III/Solution.py | fe576e1094fd4f1abf5f1fd442f98d9271e0048c | [] | no_license | nhatsmrt/AlgorithmPractice | 191a6d816d98342d723e2ab740e9a7ac7beac4ac | f27ba208b97ed2d92b4c059848cc60f6b90ce75e | refs/heads/master | 2023-06-10T18:28:45.876046 | 2023-05-26T07:46:42 | 2023-05-26T07:47:10 | 147,932,664 | 15 | 2 | null | null | null | null | UTF-8 | Python | false | false | 768 | py | class Solution:
def shortestWordDistance(self, words: List[str], word1: str, word2: str) -> int:
index = {}
for i, word in enumerate(words):
if word not in index:
index[word] = []
index[word].append(i)
ret = 10000000000
if word1 == word2:
for i in range(len(index[word1]) - 1):
ret = min(ret, index[word1][i + 1] - index[word1][i])
return ret
occ1 = index[word1]
occ2 = index[word2]
i = 0
j = 0
while i < len(occ1) and j < len(occ2):
ret = min(ret, abs(occ1[i] - occ2[j]))
if occ1[i] < occ2[j]:
i += 1
else:
j += 1
return ret
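# Worked example (hypothetical input): words = ["a", "c", "b", "a"], word1 = "a", word2 = "b"
# -> index = {"a": [0, 3], "c": [1], "b": [2]}; the two-pointer sweep over [0, 3] and [2]
# yields min(|0 - 2|, |3 - 2|) = 1. With word1 == word2 == "a", adjacent occurrences give 3 - 0 = 3.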
| [
"[email protected]"
] | |
8d9d0e317790133f034bcece449e9d1801f40422 | f124cb2443577778d8708993c984eafbd1ae3ec3 | /saleor/plugins/openid_connect/dataclasses.py | df281787eae5485c4eed4cc9fa9dc62b63f84957 | [
"BSD-3-Clause"
] | permissive | quangtynu/saleor | ac467193a7779fed93c80251828ac85d92d71d83 | 5b0e5206c5fd30d81438b6489d0441df51038a85 | refs/heads/master | 2023-03-07T19:41:20.361624 | 2022-10-20T13:19:25 | 2022-10-20T13:19:25 | 245,860,106 | 1 | 0 | BSD-3-Clause | 2023-03-06T05:46:25 | 2020-03-08T17:44:18 | Python | UTF-8 | Python | false | false | 316 | py | from dataclasses import dataclass
@dataclass
class OpenIDConnectConfig:
client_id: str
client_secret: str
enable_refresh_token: bool
json_web_key_set_url: str
authorization_url: str
logout_url: str
token_url: str
user_info_url: str
audience: str
use_scope_permissions: bool
| [
"[email protected]"
] | |
48b3d55b329489d00e4124a4623d217aa24253ca | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-osm/huaweicloudsdkosm/v2/model/incident_message_v2.py | 0bef2967a5076ff962fc33551f637afbe604a4a8 | [
"Apache-2.0"
] | permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,941 | py | # coding: utf-8
import re
import six
class IncidentMessageV2:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'type': 'int',
'replier': 'str',
'content': 'str',
'message_id': 'str',
'replier_name': 'str',
'create_time': 'datetime',
'is_first_message': 'int',
'accessory_list': 'list[SimpleAccessoryV2]'
}
attribute_map = {
'type': 'type',
'replier': 'replier',
'content': 'content',
'message_id': 'message_id',
'replier_name': 'replier_name',
'create_time': 'create_time',
'is_first_message': 'is_first_message',
'accessory_list': 'accessory_list'
}
def __init__(self, type=None, replier=None, content=None, message_id=None, replier_name=None, create_time=None, is_first_message=None, accessory_list=None):
"""IncidentMessageV2 - a model defined in huaweicloud sdk"""
self._type = None
self._replier = None
self._content = None
self._message_id = None
self._replier_name = None
self._create_time = None
self._is_first_message = None
self._accessory_list = None
self.discriminator = None
if type is not None:
self.type = type
if replier is not None:
self.replier = replier
if content is not None:
self.content = content
if message_id is not None:
self.message_id = message_id
if replier_name is not None:
self.replier_name = replier_name
if create_time is not None:
self.create_time = create_time
if is_first_message is not None:
self.is_first_message = is_first_message
if accessory_list is not None:
self.accessory_list = accessory_list
@property
def type(self):
"""Gets the type of this IncidentMessageV2.
Type: 0 = customer message, 1 = Huawei engineer message
:return: The type of this IncidentMessageV2.
:rtype: int
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this IncidentMessageV2.
Type: 0 = customer message, 1 = Huawei engineer message
:param type: The type of this IncidentMessageV2.
:type: int
"""
self._type = type
@property
def replier(self):
"""Gets the replier of this IncidentMessageV2.
ID of the replier
:return: The replier of this IncidentMessageV2.
:rtype: str
"""
return self._replier
@replier.setter
def replier(self, replier):
"""Sets the replier of this IncidentMessageV2.
ID of the replier
:param replier: The replier of this IncidentMessageV2.
:type: str
"""
self._replier = replier
@property
def content(self):
"""Gets the content of this IncidentMessageV2.
Message content
:return: The content of this IncidentMessageV2.
:rtype: str
"""
return self._content
@content.setter
def content(self, content):
"""Sets the content of this IncidentMessageV2.
Message content
:param content: The content of this IncidentMessageV2.
:type: str
"""
self._content = content
@property
def message_id(self):
"""Gets the message_id of this IncidentMessageV2.
Message ID
:return: The message_id of this IncidentMessageV2.
:rtype: str
"""
return self._message_id
@message_id.setter
def message_id(self, message_id):
"""Sets the message_id of this IncidentMessageV2.
Message ID
:param message_id: The message_id of this IncidentMessageV2.
:type: str
"""
self._message_id = message_id
@property
def replier_name(self):
"""Gets the replier_name of this IncidentMessageV2.
Name of the replier
:return: The replier_name of this IncidentMessageV2.
:rtype: str
"""
return self._replier_name
@replier_name.setter
def replier_name(self, replier_name):
"""Sets the replier_name of this IncidentMessageV2.
Name of the replier
:param replier_name: The replier_name of this IncidentMessageV2.
:type: str
"""
self._replier_name = replier_name
@property
def create_time(self):
"""Gets the create_time of this IncidentMessageV2.
Creation time
:return: The create_time of this IncidentMessageV2.
:rtype: datetime
"""
return self._create_time
@create_time.setter
def create_time(self, create_time):
"""Sets the create_time of this IncidentMessageV2.
Creation time
:param create_time: The create_time of this IncidentMessageV2.
:type: datetime
"""
self._create_time = create_time
@property
def is_first_message(self):
"""Gets the is_first_message of this IncidentMessageV2.
Whether this is the first message
:return: The is_first_message of this IncidentMessageV2.
:rtype: int
"""
return self._is_first_message
@is_first_message.setter
def is_first_message(self, is_first_message):
"""Sets the is_first_message of this IncidentMessageV2.
Whether this is the first message
:param is_first_message: The is_first_message of this IncidentMessageV2.
:type: int
"""
self._is_first_message = is_first_message
@property
def accessory_list(self):
"""Gets the accessory_list of this IncidentMessageV2.
List of attachments
:return: The accessory_list of this IncidentMessageV2.
:rtype: list[SimpleAccessoryV2]
"""
return self._accessory_list
@accessory_list.setter
def accessory_list(self, accessory_list):
"""Sets the accessory_list of this IncidentMessageV2.
List of attachments
:param accessory_list: The accessory_list of this IncidentMessageV2.
:type: list[SimpleAccessoryV2]
"""
self._accessory_list = accessory_list
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
return json.dumps(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IncidentMessageV2):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
8376f3ba760e0968095243d0a6947b384dd9d9c9 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /utils/ETF/Redemption_SA/YW_ETFSS_SZSH_019.py | 1b0fde97d8686e9b2f5c74c8d27dca8b23258a17 | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,493 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
import time
sys.path.append("/home/yhl2/workspace/xtp_test/ETF")
from import_common import *
sys.path.append("/home/yhl2/workspace/xtp_test/ETF/etf_service")
from ETF_GetComponentShare import etf_get_all_component_stk
class YW_ETFSS_SZSH_019(xtp_test_case):
def test_YW_ETFSS_SZSH_019(self):
# ----------- ETF redemption -------------
title = '深圳ETF赎回--允许现金替代:T-1日ETF拥股量1unit→T日赎回ETF' # Shenzhen ETF redemption -- cash substitution allowed: hold 1 ETF unit on T-1, redeem the ETF on T
# Define the expected values for the current test case
# Expected status: initial, unfilled, partially filled, fully filled, partial cancel reported, partially cancelled, reported pending cancel, cancelled, rejected, cancel rejected, internal cancel
# xtp_ID and cancel_xtpID default to 0 and need not be changed
case_goal = {
'case_ID': 'ATC-204-019',
'期望状态': '全成',
'errorID': 0,
'errorMSG': '',
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title + ', case_ID=' + case_goal['case_ID'])
unit_info = {
'ticker': '189902', # ETF ticker code
'etf_unit': 1, # number of ETF redemption units
'etf_unit_sell': 1, # number of ETF units to sell
'component_unit_sell': 1 # number of component-stock units to sell
}
# ----------- ETF redemption -------------
# Params: ticker, market, security type, security status, trading status, side (B = buy, S = sell), expected status, Api
stkparm = QueryEtfQty(unit_info['ticker'], '2', '14', '2', '0',
'B', case_goal['期望状态'], Api)
# ----------- Query component-stock positions before the ETF redemption -------------
component_stk_info = etf_get_all_component_stk(unit_info['ticker'])
# Define the order parameter information ------------------------------------------
# If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'用例错误原因': '获取下单参数失败, ' + stkparm['错误原因'],
}
etf_query_log(case_goal, rs)
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker':
stkparm['证券代码'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_REDEMPTION'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'quantity':
int(unit_info['etf_unit'] * stkparm['最小申赎单位']),
}
EtfParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = etfServiceTest(Api, case_goal, wt_reqs, component_stk_info)
etf_creation_log(case_goal, rs)
# -------- Secondary market: sell the ETF -----------
case_goal['期望状态'] = '废单'
case_goal['errorID'] = 11010121
case_goal['errorMSG'] = 'Failed to check security quantity.'
quantity = int(unit_info['etf_unit_sell'] *
stkparm['最小申赎单位']) # ETF quantity to sell on the secondary market
quantity_list = split_etf_quantity(quantity)
# query the limit-up price
limitup_px = getUpPrice(stkparm['证券代码'])
rs = {}
for etf_quantity in quantity_list:
wt_reqs_etf = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker':
stkparm['证券代码'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price':
limitup_px,
'quantity':
etf_quantity,
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs_etf)
if rs['用例测试结果'] is False:
etf_sell_log(case_goal, rs)
self.assertEqual(rs['用例测试结果'], True)
return
etf_sell_log(case_goal, rs)
# ------------ Secondary market: sell the component stocks -----------
case_goal['期望状态'] = '全成'
case_goal['errorID'] = 0
case_goal['errorMSG'] = ''
etf_component_info = QueryEtfComponentsInfoDB(stkparm['证券代码'],wt_reqs['market'])
rs = {}
for stk_info in etf_component_info:
stk_code = stk_info[0]
components_share = QueryEtfComponentsDB(stkparm['证券代码'],
stk_code)
components_total = int(
components_share * unit_info['component_unit_sell'])
quantity = get_valid_amount(components_total)
limitup_px = getUpPrice(stk_code)
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker':
stk_code,
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price':
limitup_px,
'quantity':
quantity,
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
rs = serviceTest(Api, case_goal, wt_reqs)
if rs['用例测试结果'] is False:
etf_components_sell_log(case_goal, rs)
self.assertEqual(rs['用例测试结果'], True)
etf_components_sell_log(case_goal, rs)
self.assertEqual(rs['用例测试结果'], True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
68a3e17117ffc29cf8def3bcc4810417498b7ef9 | 297c440536f04c5ff4be716b445ea28cf007c930 | /App/migrations/0007_auto_20200403_2201.py | 2c78246df112461b79785ba55ec3ca0a977b1975 | [] | no_license | Chukslord1/SchoolManagement | 446ab8c643035c57d7320f48905ef471ab3e0252 | 23fd179c0078d863675b376a02193d7c1f3c52e0 | refs/heads/master | 2023-02-03T09:14:24.036840 | 2020-12-14T11:06:43 | 2020-12-14T11:06:43 | 247,177,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 412 | py | # Generated by Django 3.0 on 2020-04-04 05:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('App', '0006_auto_20200403_1841'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='secret_pin',
field=models.CharField(blank=True, max_length=12, null=True),
),
]
| [
"[email protected]"
] | |
6d14d26ba8d381f9ed5cef9a5cfdb6f18817b2ca | 2a3743ced45bd79826dcdc55f304da049f627f1b | /venv/lib/python3.7/site-packages/jedi/evaluate/compiled/subprocess/__init__.py | dea2f66d3727885dad3d9c94f687112e8a842988 | [
"MIT"
] | permissive | Dimasik007/Deribit_funding_rate_indicator | 12cc8cd7c0be564d6e34d9eae91940c62492ae2a | 3251602ae5249069489834f9afb57b11ff37750e | refs/heads/master | 2023-05-26T10:14:20.395939 | 2019-08-03T11:35:51 | 2019-08-03T11:35:51 | 198,705,946 | 5 | 3 | MIT | 2023-05-22T22:29:24 | 2019-07-24T20:32:19 | Python | UTF-8 | Python | false | false | 14,161 | py | """
Makes it possible to do the compiled analysis in a subprocess. This has two
goals:
1. Making it safer - Segfaults and RuntimeErrors as well as stdout/stderr can
be ignored and dealt with.
2. Make it possible to handle different Python versions as well as virtualenvs.
"""
import os
import sys
import subprocess
import socket
import errno
import traceback
from functools import partial
from threading import Thread
try:
from queue import Queue, Empty
except ImportError:
from Queue import Queue, Empty # python 2.7
from jedi._compatibility import queue, is_py3, force_unicode, \
pickle_dump, pickle_load, GeneralizedPopen, weakref
from jedi import debug
from jedi.cache import memoize_method
from jedi.evaluate.compiled.subprocess import functions
from jedi.evaluate.compiled.access import DirectObjectAccess, AccessPath, \
SignatureParam
from jedi.api.exceptions import InternalError
_MAIN_PATH = os.path.join(os.path.dirname(__file__), '__main__.py')
def _enqueue_output(out, queue):
for line in iter(out.readline, b''):
queue.put(line)
def _add_stderr_to_debug(stderr_queue):
while True:
# Try to do some error reporting from the subprocess and print its
# stderr contents.
try:
line = stderr_queue.get_nowait()
line = line.decode('utf-8', 'replace')
debug.warning('stderr output: %s' % line.rstrip('\n'))
except Empty:
break
def _get_function(name):
return getattr(functions, name)
def _cleanup_process(process, thread):
try:
process.kill()
process.wait()
except OSError:
# Raised if the process is already killed.
pass
thread.join()
for stream in [process.stdin, process.stdout, process.stderr]:
try:
stream.close()
except OSError:
# Raised if the stream is broken.
pass
class _EvaluatorProcess(object):
def __init__(self, evaluator):
self._evaluator_weakref = weakref.ref(evaluator)
self._evaluator_id = id(evaluator)
self._handles = {}
def get_or_create_access_handle(self, obj):
id_ = id(obj)
try:
return self.get_access_handle(id_)
except KeyError:
access = DirectObjectAccess(self._evaluator_weakref(), obj)
handle = AccessHandle(self, access, id_)
self.set_access_handle(handle)
return handle
def get_access_handle(self, id_):
return self._handles[id_]
def set_access_handle(self, handle):
self._handles[handle.id] = handle
class EvaluatorSameProcess(_EvaluatorProcess):
"""
Basically just easy access to functions.py. It has the same API
as EvaluatorSubprocess and does the same thing without using a subprocess.
This is necessary for the Interpreter process.
"""
def __getattr__(self, name):
return partial(_get_function(name), self._evaluator_weakref())
class EvaluatorSubprocess(_EvaluatorProcess):
def __init__(self, evaluator, compiled_subprocess):
super(EvaluatorSubprocess, self).__init__(evaluator)
self._used = False
self._compiled_subprocess = compiled_subprocess
def __getattr__(self, name):
func = _get_function(name)
def wrapper(*args, **kwargs):
self._used = True
result = self._compiled_subprocess.run(
self._evaluator_weakref(),
func,
args=args,
kwargs=kwargs,
)
# IMO it should be possible to create a hook in pickle.load to
# mess with the loaded objects. However it's extremely complicated
# to work around this so just do it with this call. ~ dave
return self._convert_access_handles(result)
return wrapper
def _convert_access_handles(self, obj):
if isinstance(obj, SignatureParam):
return SignatureParam(*self._convert_access_handles(tuple(obj)))
elif isinstance(obj, tuple):
return tuple(self._convert_access_handles(o) for o in obj)
elif isinstance(obj, list):
return [self._convert_access_handles(o) for o in obj]
elif isinstance(obj, AccessHandle):
try:
# Rewrite the access handle to one we're already having.
obj = self.get_access_handle(obj.id)
except KeyError:
obj.add_subprocess(self)
self.set_access_handle(obj)
elif isinstance(obj, AccessPath):
return AccessPath(self._convert_access_handles(obj.accesses))
return obj
def __del__(self):
if self._used and not self._compiled_subprocess.is_crashed:
self._compiled_subprocess.delete_evaluator(self._evaluator_id)
class CompiledSubprocess(object):
is_crashed = False
# Start with 2, gets set after _get_info.
_pickle_protocol = 2
def __init__(self, executable):
self._executable = executable
self._evaluator_deletion_queue = queue.deque()
self._cleanup_callable = lambda: None
def __repr__(self):
pid = os.getpid()
return '<%s _executable=%r, _pickle_protocol=%r, is_crashed=%r, pid=%r>' % (
self.__class__.__name__,
self._executable,
self._pickle_protocol,
self.is_crashed,
pid,
)
@memoize_method
def _get_process(self):
debug.dbg('Start environment subprocess %s', self._executable)
parso_path = sys.modules['parso'].__file__
args = (
self._executable,
_MAIN_PATH,
os.path.dirname(os.path.dirname(parso_path)),
'.'.join(str(x) for x in sys.version_info[:3]),
)
process = GeneralizedPopen(
args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
# Use system default buffering on Python 2 to improve performance
# (this is already the case on Python 3).
bufsize=-1
)
self._stderr_queue = Queue()
self._stderr_thread = t = Thread(
target=_enqueue_output,
args=(process.stderr, self._stderr_queue)
)
t.daemon = True
t.start()
# Ensure the subprocess is properly cleaned up when the object
# is garbage collected.
self._cleanup_callable = weakref.finalize(self,
_cleanup_process,
process,
t)
return process
def run(self, evaluator, function, args=(), kwargs={}):
# Delete old evaluators.
while True:
try:
evaluator_id = self._evaluator_deletion_queue.pop()
except IndexError:
break
else:
self._send(evaluator_id, None)
assert callable(function)
return self._send(id(evaluator), function, args, kwargs)
def get_sys_path(self):
return self._send(None, functions.get_sys_path, (), {})
def _kill(self):
self.is_crashed = True
self._cleanup_callable()
def _send(self, evaluator_id, function, args=(), kwargs={}):
if self.is_crashed:
raise InternalError("The subprocess %s has crashed." % self._executable)
if not is_py3:
# Python 2 compatibility
kwargs = {force_unicode(key): value for key, value in kwargs.items()}
data = evaluator_id, function, args, kwargs
try:
pickle_dump(data, self._get_process().stdin, self._pickle_protocol)
except (socket.error, IOError) as e:
# Once Python2 will be removed we can just use `BrokenPipeError`.
# Also, somehow in windows it returns EINVAL instead of EPIPE if
# the subprocess dies.
if e.errno not in (errno.EPIPE, errno.EINVAL):
# Not a broken pipe
raise
self._kill()
raise InternalError("The subprocess %s was killed. Maybe out of memory?"
% self._executable)
try:
is_exception, traceback, result = pickle_load(self._get_process().stdout)
except EOFError as eof_error:
try:
stderr = self._get_process().stderr.read().decode('utf-8', 'replace')
except Exception as exc:
stderr = '<empty/not available (%r)>' % exc
self._kill()
_add_stderr_to_debug(self._stderr_queue)
raise InternalError(
"The subprocess %s has crashed (%r, stderr=%s)." % (
self._executable,
eof_error,
stderr,
))
_add_stderr_to_debug(self._stderr_queue)
if is_exception:
# Replace the attribute error message with a the traceback. It's
# way more informative.
result.args = (traceback,)
raise result
return result
def delete_evaluator(self, evaluator_id):
"""
Currently we are not deleting evaluators instantly. They only get
deleted once the subprocess is used again. It would probably be a better
solution to move all of this into a thread. However, the memory usage
of a single evaluator shouldn't be that high.
"""
# With an argument - the evaluator gets deleted.
self._evaluator_deletion_queue.append(evaluator_id)
class Listener(object):
def __init__(self, pickle_protocol):
self._evaluators = {}
# TODO: refactor so we don't need the process anymore, just handle
# controlling.
self._process = _EvaluatorProcess(Listener)
self._pickle_protocol = pickle_protocol
def _get_evaluator(self, function, evaluator_id):
from jedi.evaluate import Evaluator
try:
evaluator = self._evaluators[evaluator_id]
except KeyError:
from jedi.api.environment import InterpreterEnvironment
evaluator = Evaluator(
# The project is not actually needed. Nothing should need to
# access it.
project=None,
environment=InterpreterEnvironment()
)
self._evaluators[evaluator_id] = evaluator
return evaluator
def _run(self, evaluator_id, function, args, kwargs):
if evaluator_id is None:
return function(*args, **kwargs)
elif function is None:
del self._evaluators[evaluator_id]
else:
evaluator = self._get_evaluator(function, evaluator_id)
# Exchange all handles
args = list(args)
for i, arg in enumerate(args):
if isinstance(arg, AccessHandle):
args[i] = evaluator.compiled_subprocess.get_access_handle(arg.id)
for key, value in kwargs.items():
if isinstance(value, AccessHandle):
kwargs[key] = evaluator.compiled_subprocess.get_access_handle(value.id)
return function(evaluator, *args, **kwargs)
def listen(self):
stdout = sys.stdout
# Mute stdout. Nobody should actually be able to write to it,
# because stdout is used for IPC.
sys.stdout = open(os.devnull, 'w')
stdin = sys.stdin
if sys.version_info[0] > 2:
stdout = stdout.buffer
stdin = stdin.buffer
# Python 2 opens streams in text mode on Windows. Set stdout and stdin
# to binary mode.
elif sys.platform == 'win32':
import msvcrt
msvcrt.setmode(stdout.fileno(), os.O_BINARY)
msvcrt.setmode(stdin.fileno(), os.O_BINARY)
while True:
try:
payload = pickle_load(stdin)
except EOFError:
# It looks like the parent process closed.
# Don't make a big fuss here and just exit.
exit(0)
try:
result = False, None, self._run(*payload)
except Exception as e:
result = True, traceback.format_exc(), e
pickle_dump(result, stdout, self._pickle_protocol)
class AccessHandle(object):
def __init__(self, subprocess, access, id_):
self.access = access
self._subprocess = subprocess
self.id = id_
def add_subprocess(self, subprocess):
self._subprocess = subprocess
def __repr__(self):
try:
detail = self.access
except AttributeError:
detail = '#' + str(self.id)
return '<%s of %s>' % (self.__class__.__name__, detail)
def __getstate__(self):
return self.id
def __setstate__(self, state):
self.id = state
def __getattr__(self, name):
if name in ('id', 'access') or name.startswith('_'):
raise AttributeError("Something went wrong with unpickling")
#if not is_py3: print >> sys.stderr, name
#print('getattr', name, file=sys.stderr)
return partial(self._workaround, force_unicode(name))
def _workaround(self, name, *args, **kwargs):
"""
TODO Currently we're passing slice objects around. This should not
happen. They are also the only unhashable objects that we're passing
around.
"""
if args and isinstance(args[0], slice):
return self._subprocess.get_compiled_method_return(self.id, name, *args, **kwargs)
return self._cached_results(name, *args, **kwargs)
@memoize_method
def _cached_results(self, name, *args, **kwargs):
#if type(self._subprocess) == EvaluatorSubprocess:
#print(name, args, kwargs,
#self._subprocess.get_compiled_method_return(self.id, name, *args, **kwargs)
#)
return self._subprocess.get_compiled_method_return(self.id, name, *args, **kwargs)
| [
"[email protected]"
] | |
7c344fcf6b9f60cc30778e1ef5ef3f5afc6f3ea0 | ba22f289ad1c49fb286105aeaa9abd8548907dc5 | /tempest/tests/lib/test_tempest_lib.py | d70e53dee8a7a2d9d91a0a5a5f89d4b72c3be367 | [
"Apache-2.0"
] | permissive | ssameerr/tempest | cf3f41b3aa07124a1bac69c3c3f2e393b52e671c | e413f28661c2aab3f8da8d005db1fa5c59cc6b68 | refs/heads/master | 2023-08-08T05:00:45.998493 | 2016-06-08T13:13:48 | 2016-06-08T13:13:48 | 60,715,004 | 0 | 0 | Apache-2.0 | 2023-02-15T02:18:34 | 2016-06-08T17:02:15 | Python | UTF-8 | Python | false | false | 780 | py | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_tempest.lib
----------------------------------
Tests for `tempest.lib` module.
"""
from tempest.tests import base
class TestTempest_lib(base.TestCase):
def test_something(self):
pass
| [
"[email protected]"
] | |
cf8da9eba1dbf16d89e8f4edbec9cf3a882a4551 | 746c4544fca849cddcf0a6c3efbcc2b1fd23eb6c | /charts/barchart_wage_age.py | f02a098e9f93d918e50ce35c0628046918cc5997 | [] | no_license | jonasthiel/st101-intro-statistics | 7603b1aaa42b7fcf7d01c8c81e62058fc44fbb16 | 6a240fc960129fbd44f7a840227d6c5560b054f3 | refs/heads/main | 2023-02-16T00:56:02.663632 | 2020-12-31T20:42:42 | 2020-12-31T20:42:42 | 325,868,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,623 | py | #Write a line of code to plot a barchart of Wage grouped by Age
from plotting import *
Age=[25, 26, 33, 29, 27, 21, 26, 35, 21, 37, 21, 38, 18, 19, 36, 30, 29, 24, 24, 36, 36, 27, 33, 23, 21, 26, 27, 27, 24, 26, 25, 24, 22, 25, 40, 39, 19, 31, 33, 30, 33, 27, 40, 32, 31, 35, 26, 34, 27, 34, 33, 20, 19, 40, 39, 39, 37, 18, 35, 20, 28, 31, 30, 29, 31, 18, 40, 20, 32, 20, 34, 34, 25, 29, 40, 40, 39, 36, 39, 34, 34, 35, 39, 38, 33, 32, 21, 29, 36, 33, 30, 39, 21, 19, 38, 30, 40, 36, 34, 28, 37, 29, 39, 25, 36, 33, 37, 19, 28, 26, 18, 22, 40, 20, 40, 20, 39, 29, 26, 26, 22, 37, 34, 29, 24, 23, 21, 19, 29, 30, 23, 40, 30, 30, 19, 39, 39, 25, 36, 38, 24, 32, 34, 33, 36, 30, 35, 26, 28, 23, 25, 23, 40, 20, 26, 26, 22, 23, 18, 36, 34, 36, 35, 40, 39, 39, 33, 22, 37, 20, 37, 35, 20, 23, 37, 32, 25, 35, 35, 22, 21, 31, 40, 26, 24, 29, 37, 19, 33, 31, 29, 27, 21, 19, 39, 34, 34, 40, 26, 39, 35, 31, 35, 24, 19, 27, 27, 20, 28, 30, 23, 21, 20, 26, 31, 24, 25, 25, 22, 32, 28, 36, 21, 38, 18, 25, 21, 33, 40, 19, 38, 33, 37, 32, 31, 31, 38, 19, 37, 37, 32, 36, 34, 35, 35, 35, 37, 35, 39, 34, 24, 25, 18, 40, 33, 32, 23, 25, 19, 39, 38, 36, 32, 27, 22, 40, 28, 29, 25, 36, 26, 28, 32, 34, 34, 21, 21, 32, 19, 35, 30, 35, 26, 31, 38, 34, 33, 35, 37, 38, 36, 40, 22, 30, 28, 28, 29, 36, 24, 28, 28, 28, 26, 21, 35, 22, 32, 28, 19, 33, 18, 22, 36, 26, 19, 26, 30, 27, 28, 24, 36, 37, 20, 32, 38, 39, 38, 30, 32, 30, 26, 23, 19, 29, 33, 34, 23, 30, 32, 40, 36, 29, 39, 34, 34, 22, 22, 22, 36, 38, 38, 30, 26, 40, 34, 21, 34, 38, 32, 35, 35, 26, 28, 20, 40, 23, 24, 26, 24, 39, 21, 33, 31, 39, 39, 20, 22, 18, 23, 36, 32, 37, 36, 26, 30, 30, 30, 21, 22, 40, 38, 22, 27, 23, 21, 22, 20, 30, 31, 40, 19, 32, 24, 21, 27, 32, 30, 34, 18, 25, 22, 40, 23, 19, 24, 24, 25, 40, 27, 29, 22, 39, 38, 34, 39, 30, 31, 33, 34, 25, 20, 20, 20, 20, 24, 19, 21, 31, 31, 29, 38, 39, 33, 40, 24, 38, 37, 18, 24, 38, 38, 22, 40, 21, 36, 30, 21, 30, 35, 20, 25, 25, 29, 30, 20, 29, 29, 31, 20, 26, 26, 38, 37, 39, 31, 35, 36, 30, 38, 36, 23, 39, 39, 20, 30, 34, 21, 23, 21, 33, 30, 33, 32, 36, 18, 31, 32, 25, 23, 23, 21, 34, 18, 40, 21, 29, 29, 21, 38, 35, 38, 32, 38, 27, 23, 33, 29, 19, 20, 35, 29, 27, 28, 20, 40, 35, 40, 40, 20, 36, 38, 28, 30, 30, 36, 29, 27, 25, 33, 19, 27, 28, 34, 36, 27, 40, 38, 37, 31, 33, 38, 36, 25, 23, 22, 23, 34, 26, 24, 28, 32, 22, 18, 29, 19, 21, 27, 28, 35, 30, 40, 28, 37, 34, 24, 40, 33, 29, 30, 36, 25, 26, 26, 28, 34, 39, 34, 26, 24, 33, 38, 37, 36, 34, 37, 33, 25, 27, 30, 26, 21, 40, 26, 25, 25, 40, 28, 35, 36, 39, 33, 36, 40, 32, 36, 26, 24, 36, 27, 28, 26, 37, 36, 37, 36, 20, 34, 30, 32, 40, 20, 31, 23, 27, 19, 24, 23, 24, 25, 36, 26, 33, 30, 27, 26, 28, 28, 21, 31, 24, 27, 24, 29, 29, 28, 22, 20, 23, 35, 30, 37, 31, 31, 21, 32, 29, 27, 27, 30, 39, 34, 23, 35, 39, 27, 40, 28, 36, 35, 38, 21, 18, 21, 38, 37, 24, 21, 25, 35, 27, 35, 24, 36, 32, 20]
Wage=[17000, 13000, 28000, 45000, 28000, 1200, 15500, 26400, 14000, 35000, 16400, 50000, 2600, 9000, 27000, 150000, 32000, 22000, 65000, 56000, 6500, 30000, 70000, 9000, 6000, 34000, 40000, 30000, 6400, 87000, 20000, 45000, 4800, 34000, 75000, 26000, 4000, 50000, 63000, 14700, 45000, 42000, 10000, 40000, 70000, 14000, 54000, 14000, 23000, 24400, 27900, 4700, 8000, 19000, 17300, 45000, 3900, 2900, 138000, 2100, 60000, 55000, 45000, 40000, 45700, 90000, 40000, 13000, 30000, 2000, 75000, 60000, 70000, 41000, 42000, 31000, 39000, 104000, 52000, 20000, 59000, 66000, 63000, 32000, 11000, 16000, 6400, 17000, 47700, 5000, 25000, 35000, 20000, 14000, 29000, 267000, 31000, 27000, 64000, 39600, 267000, 7100, 33000, 31500, 40000, 23000, 3000, 14000, 44000, 15100, 2600, 6200, 50000, 3000, 25000, 2000, 38000, 22000, 20000, 2500, 1500, 42000, 30000, 27000, 7000, 11900, 27000, 24000, 4300, 30200, 2500, 30000, 70000, 38700, 8000, 36000, 66000, 24000, 95000, 39000, 20000, 23000, 56000, 25200, 62000, 12000, 13000, 35000, 35000, 14000, 24000, 12000, 14000, 31000, 40000, 22900, 12000, 14000, 1600, 12000, 80000, 90000, 126000, 1600, 100000, 8000, 71000, 40000, 42000, 40000, 120000, 35000, 1200, 4000, 32000, 8000, 14500, 65000, 15000, 3000, 2000, 23900, 1000, 22000, 18200, 8000, 30000, 23000, 30000, 27000, 70000, 40000, 18000, 3100, 57000, 25000, 32000, 10000, 4000, 49000, 93000, 35000, 49000, 40000, 5500, 30000, 25000, 5700, 6000, 30000, 42900, 8000, 5300, 90000, 85000, 15000, 17000, 5600, 11500, 52000, 1000, 42000, 2100, 50000, 1500, 40000, 28000, 5300, 149000, 3200, 12000, 83000, 45000, 31200, 25000, 72000, 70000, 7000, 23000, 40000, 40000, 28000, 10000, 48000, 20000, 60000, 19000, 25000, 39000, 68000, 2300, 23900, 5000, 16300, 80000, 45000, 12000, 9000, 1300, 35000, 35000, 47000, 32000, 18000, 20000, 20000, 23400, 48000, 8000, 5200, 33500, 22000, 22000, 52000, 104000, 28000, 13000, 12000, 15000, 53000, 27000, 50000, 13900, 23000, 28100, 23000, 12000, 55000, 83000, 31000, 33200, 45000, 3000, 18000, 11000, 41000, 36000, 33600, 38000, 45000, 53000, 24000, 3000, 37500, 7700, 4800, 29000, 6600, 12400, 20000, 2000, 1100, 55000, 13400, 10000, 6000, 6000, 16000, 19000, 8300, 52000, 58000, 27000, 25000, 80000, 10000, 22000, 18000, 21000, 8000, 15200, 15000, 5000, 50000, 89000, 7000, 65000, 58000, 42000, 55000, 40000, 14000, 36000, 30000, 7900, 6000, 1200, 10000, 54000, 12800, 35000, 34000, 40000, 45000, 9600, 3300, 39000, 22000, 40000, 68000, 24400, 1000, 10800, 8400, 50000, 22000, 20000, 20000, 1300, 9000, 14200, 32000, 65000, 18000, 18000, 3000, 16700, 1500, 1400, 15000, 55000, 42000, 70000, 35000, 21600, 5800, 35000, 5700, 1700, 40000, 40000, 45000, 25000, 13000, 6400, 11000, 4200, 30000, 32000, 120000, 10000, 19000, 12000, 13000, 37000, 40000, 38000, 60000, 3100, 16000, 18000, 130000, 5000, 5000, 35000, 1000, 14300, 100000, 20000, 33000, 8000, 9400, 87000, 2500, 12000, 12000, 33000, 16500, 25500, 7200, 2300, 3100, 2100, 3200, 45000, 40000, 3800, 30000, 12000, 62000, 45000, 46000, 50000, 40000, 13000, 50000, 23000, 4000, 40000, 25000, 16000, 3000, 80000, 27000, 68000, 3500, 1300, 10000, 46000, 5800, 24000, 12500, 50000, 48000, 29000, 19000, 26000, 30000, 10000, 10000, 20000, 43000, 105000, 55000, 5000, 65000, 68000, 38000, 47000, 48700, 6100, 55000, 30000, 5000, 3500, 23400, 11400, 7000, 1300, 80000, 65000, 45000, 19000, 3000, 17100, 22900, 31200, 35000, 3000, 5000, 1000, 36000, 4800, 60000, 9800, 30000, 85000, 18000, 24000, 60000, 30000, 2000, 39000, 12000, 10500, 60000, 36000, 10500, 3600, 1200, 28600, 48000, 
20800, 5400, 9600, 30000, 30000, 20000, 6700, 30000, 3200, 42000, 37000, 5000, 18000, 20000, 14000, 12000, 18000, 3000, 13500, 35000, 38000, 30000, 36000, 66000, 45000, 32000, 46000, 80000, 27000, 4000, 21000, 7600, 16000, 10300, 27000, 19000, 14000, 19000, 3100, 20000, 2700, 27000, 7000, 13600, 75000, 35000, 36000, 25000, 6000, 36000, 50000, 46000, 3000, 37000, 40000, 30000, 48800, 19700, 16000, 14000, 12000, 25000, 25000, 28600, 17000, 31200, 57000, 23000, 23500, 46000, 18700, 26700, 9900, 16000, 3000, 52000, 51000, 14000, 14400, 27000, 26000, 60000, 25000, 6000, 20000, 3000, 69000, 24800, 12000, 3100, 18000, 20000, 267000, 28000, 9800, 18200, 80000, 6800, 21100, 20000, 68000, 20000, 45000, 8000, 40000, 31900, 28000, 24000, 2000, 32000, 11000, 20000, 5900, 16100, 23900, 40000, 37500, 11000, 55000, 37500, 60000, 23000, 9500, 34500, 4000, 9000, 11200, 35200, 30000, 18000, 21800, 19700, 16700, 12500, 11300, 4000, 39000, 32000, 14000, 65000, 50000, 2000, 30400, 22000, 1600, 56000, 40000, 85000, 9000, 10000, 19000, 5300, 5200, 43000, 60000, 50000, 38000, 267000, 15600, 1800, 17000, 45000, 31000, 5000, 8000, 43000, 103000, 45000, 8800, 26000, 47000, 40000, 8000]
barchart(Age, Wage) | [
"[email protected]"
] | |
ebaad1711387e4345c65fa0681b7f399491d8301 | 993ef8924418866f932396a58e3ad0c2a940ddd3 | /Production/python/PrivateSamples/EMJ_UL17_mMed-2200_mDark-10_ctau-0p1_unflavored-down_cff.py | 5243ead8ffb566b86b44cde4297d9248e99657be | [] | no_license | TreeMaker/TreeMaker | 48d81f6c95a17828dbb599d29c15137cd6ef009a | 15dd7fe9e9e6f97d9e52614c900c27d200a6c45f | refs/heads/Run2_UL | 2023-07-07T15:04:56.672709 | 2023-07-03T16:43:17 | 2023-07-03T16:43:17 | 29,192,343 | 16 | 92 | null | 2023-07-03T16:43:28 | 2015-01-13T13:59:30 | Python | UTF-8 | Python | false | false | 1,981 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-1.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-10.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-2.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-3.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-4.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-5.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-6.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-7.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-8.root',
'root://cmseos.fnal.gov///store/group/lpcsusyhad/ExoEMJAnalysis2020/Signal.Oct.2021/UL17/step4_MINIAODv2_mMed-2200_mDark-10_ctau-0p1_unflavored-down_n-500_part-9.root',
] )
| [
"[email protected]"
] | |
45c7d12a50f07cacbd16780c5e103cca1dab389d | 8dc64db8a0d7ddb8778c8eae2dac9075b9a90e2b | /env/Lib/site-packages/googleapiclient/discovery.py | 6363809f1e5bc5de0bd293b63699f37f33542a5e | [
"MIT"
] | permissive | theXtroyer1221/Cloud-buffer | c3992d1b543a1f11fde180f6f7d988d28b8f9684 | 37eabdd78c15172ea980b59d1aff65d8628cb845 | refs/heads/master | 2022-11-22T22:37:10.453923 | 2022-02-25T01:15:57 | 2022-02-25T01:15:57 | 240,901,269 | 1 | 1 | MIT | 2022-09-04T14:48:02 | 2020-02-16T14:00:32 | HTML | UTF-8 | Python | false | false | 59,078 | py | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs.
A client library for Google's discovery based APIs.
"""
from __future__ import absolute_import
import six
from six.moves import zip
__author__ = "[email protected] (Joe Gregorio)"
__all__ = ["build", "build_from_document", "fix_method_name", "key2param"]
from six import BytesIO
from six.moves import http_client
from six.moves.urllib.parse import urlencode, urlparse, urljoin, urlunparse, parse_qsl
# Standard library imports
import copy
from collections import OrderedDict
try:
from email.generator import BytesGenerator
except ImportError:
from email.generator import Generator as BytesGenerator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
import json
import keyword
import logging
import mimetypes
import os
import re
# Third-party imports
import httplib2
import uritemplate
import google.api_core.client_options
from google.auth.transport import mtls
from google.auth.exceptions import MutualTLSChannelError
try:
import google_auth_httplib2
except ImportError: # pragma: NO COVER
google_auth_httplib2 = None
# Local imports
from googleapiclient import _auth
from googleapiclient import mimeparse
from googleapiclient.errors import HttpError
from googleapiclient.errors import InvalidJsonError
from googleapiclient.errors import MediaUploadSizeError
from googleapiclient.errors import UnacceptableMimeTypeError
from googleapiclient.errors import UnknownApiNameOrVersion
from googleapiclient.errors import UnknownFileType
from googleapiclient.http import build_http
from googleapiclient.http import BatchHttpRequest
from googleapiclient.http import HttpMock
from googleapiclient.http import HttpMockSequence
from googleapiclient.http import HttpRequest
from googleapiclient.http import MediaFileUpload
from googleapiclient.http import MediaUpload
from googleapiclient.model import JsonModel
from googleapiclient.model import MediaModel
from googleapiclient.model import RawModel
from googleapiclient.schema import Schemas
from googleapiclient._helpers import _add_query_parameter
from googleapiclient._helpers import positional
# The client library requires a version of httplib2 that supports RETRIES.
httplib2.RETRIES = 1
logger = logging.getLogger(__name__)
URITEMPLATE = re.compile("{[^}]*}")
VARNAME = re.compile("[a-zA-Z0-9_-]+")
DISCOVERY_URI = (
"https://www.googleapis.com/discovery/v1/apis/" "{api}/{apiVersion}/rest"
)
V1_DISCOVERY_URI = DISCOVERY_URI
V2_DISCOVERY_URI = (
"https://{api}.googleapis.com/$discovery/rest?" "version={apiVersion}"
)
DEFAULT_METHOD_DOC = "A description of how to use this function"
HTTP_PAYLOAD_METHODS = frozenset(["PUT", "POST", "PATCH"])
_MEDIA_SIZE_BIT_SHIFTS = {"KB": 10, "MB": 20, "GB": 30, "TB": 40}
BODY_PARAMETER_DEFAULT_VALUE = {"description": "The request body.", "type": "object"}
MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
"description": (
"The filename of the media request body, or an instance "
"of a MediaUpload object."
),
"type": "string",
"required": False,
}
MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE = {
"description": (
"The MIME type of the media request body, or an instance "
"of a MediaUpload object."
),
"type": "string",
"required": False,
}
_PAGE_TOKEN_NAMES = ("pageToken", "nextPageToken")
# Parameters controlling mTLS behavior. See https://google.aip.dev/auth/4114.
GOOGLE_API_USE_CLIENT_CERTIFICATE = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
GOOGLE_API_USE_MTLS_ENDPOINT = "GOOGLE_API_USE_MTLS_ENDPOINT"
# Parameters accepted by the stack, but not visible via discovery.
# TODO(dhermes): Remove 'userip' in 'v2'.
STACK_QUERY_PARAMETERS = frozenset(["trace", "pp", "userip", "strict"])
STACK_QUERY_PARAMETER_DEFAULT_VALUE = {"type": "string", "location": "query"}
# Library-specific reserved words beyond Python keywords.
RESERVED_WORDS = frozenset(["body"])
# patch _write_lines to avoid munging '\r' into '\n'
# ( https://bugs.python.org/issue18886 https://bugs.python.org/issue19003 )
class _BytesGenerator(BytesGenerator):
_write_lines = BytesGenerator.write
def fix_method_name(name):
"""Fix method names to avoid '$' characters and reserved word conflicts.
Args:
name: string, method name.
Returns:
The name with '_' appended if the name is a reserved word and '$' and '-'
replaced with '_'.
"""
name = name.replace("$", "_").replace("-", "_")
if keyword.iskeyword(name) or name in RESERVED_WORDS:
return name + "_"
else:
return name
def key2param(key):
"""Converts key names into parameter names.
For example, converting "max-results" -> "max_results"
Args:
key: string, the method key name.
Returns:
A safe method name based on the key name.
"""
result = []
key = list(key)
if not key[0].isalpha():
result.append("x")
for c in key:
if c.isalnum():
result.append(c)
else:
result.append("_")
return "".join(result)
@positional(2)
def build(
serviceName,
version,
http=None,
discoveryServiceUrl=DISCOVERY_URI,
developerKey=None,
model=None,
requestBuilder=HttpRequest,
credentials=None,
cache_discovery=True,
cache=None,
client_options=None,
adc_cert_path=None,
adc_key_path=None,
num_retries=1,
):
"""Construct a Resource for interacting with an API.
Construct a Resource object for interacting with an API. The serviceName and
version are the names from the Discovery service.
Args:
serviceName: string, name of the service.
version: string, the version of the service.
http: httplib2.Http, An instance of httplib2.Http or something that acts
like it that HTTP requests will be made through.
discoveryServiceUrl: string, a URI Template that points to the location of
the discovery service. It should have two parameters {api} and
{apiVersion} that when filled in produce an absolute URI to the discovery
document for that service.
developerKey: string, key obtained from
https://code.google.com/apis/console.
model: googleapiclient.Model, converts to and from the wire format.
requestBuilder: googleapiclient.http.HttpRequest, encapsulator for an HTTP
request.
credentials: oauth2client.Credentials or
google.auth.credentials.Credentials, credentials to be used for
authentication.
cache_discovery: Boolean, whether or not to cache the discovery doc.
cache: googleapiclient.discovery_cache.base.CacheBase, an optional
cache object for the discovery documents.
client_options: Mapping object or google.api_core.client_options, client
options to set user options on the client.
(1) The API endpoint should be set through client_options. If API endpoint
is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used
to control which endpoint to use.
(2) client_cert_source is not supported, client cert should be provided using
client_encrypted_cert_source instead. In order to use the provided client
cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be
set to `true`.
More details on the environment variables are here:
https://google.aip.dev/auth/4114
adc_cert_path: str, client certificate file path to save the application
default client certificate for mTLS. This field is required if you want to
use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE`
environment variable must be set to `true` in order to use this field,
otherwise this field does nothing.
More details on the environment variables are here:
https://google.aip.dev/auth/4114
adc_key_path: str, client encrypted private key file path to save the
application default client encrypted private key for mTLS. This field is
required if you want to use the default client certificate.
`GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to
`true` in order to use this field, otherwise this field does nothing.
More details on the environment variables are here:
https://google.aip.dev/auth/4114
num_retries: Integer, number of times to retry discovery with
randomized exponential backoff in case of intermittent/connection issues.
Returns:
A Resource object with methods for interacting with the service.
Raises:
google.auth.exceptions.MutualTLSChannelError: if there are any problems
setting up mutual TLS channel.
"""
params = {"api": serviceName, "apiVersion": version}
if http is None:
discovery_http = build_http()
else:
discovery_http = http
service = None
for discovery_url in _discovery_service_uri_options(discoveryServiceUrl, version):
requested_url = uritemplate.expand(discovery_url, params)
try:
content = _retrieve_discovery_doc(
requested_url,
discovery_http,
cache_discovery,
cache,
developerKey,
num_retries=num_retries,
)
service = build_from_document(
content,
base=discovery_url,
http=http,
developerKey=developerKey,
model=model,
requestBuilder=requestBuilder,
credentials=credentials,
client_options=client_options,
adc_cert_path=adc_cert_path,
adc_key_path=adc_key_path,
)
break # exit if a service was created
except HttpError as e:
if e.resp.status == http_client.NOT_FOUND:
continue
else:
raise e
# If discovery_http was created by this function, we are done with it
# and can safely close it
if http is None:
discovery_http.close()
if service is None:
raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName, version))
else:
return service
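# Example usage (a sketch only; "drive"/"v3", the API key, and the files().list
# call are illustrative placeholders that depend on the API being used):
#
#   from googleapiclient.discovery import build
#
#   service = build("drive", "v3", developerKey="YOUR_API_KEY")
#   try:
#       response = service.files().list(pageSize=10).execute()
#   finally:
#       service.close()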
def _discovery_service_uri_options(discoveryServiceUrl, version):
"""
    Returns Discovery URIs to be used for attempting to build the API Resource.
Args:
discoveryServiceUrl:
string, the Original Discovery Service URL preferred by the customer.
version:
string, API Version requested
Returns:
A list of URIs to be tried for the Service Discovery, in order.
"""
urls = [discoveryServiceUrl, V2_DISCOVERY_URI]
# V1 Discovery won't work if the requested version is None
if discoveryServiceUrl == V1_DISCOVERY_URI and version is None:
logger.warning(
"Discovery V1 does not support empty versions. Defaulting to V2..."
)
urls.pop(0)
return list(OrderedDict.fromkeys(urls))
def _retrieve_discovery_doc(
url, http, cache_discovery, cache=None, developerKey=None, num_retries=1
):
"""Retrieves the discovery_doc from cache or the internet.
Args:
url: string, the URL of the discovery document.
http: httplib2.Http, An instance of httplib2.Http or something that acts
like it through which HTTP requests will be made.
cache_discovery: Boolean, whether or not to cache the discovery doc.
cache: googleapiclient.discovery_cache.base.Cache, an optional cache
object for the discovery documents.
developerKey: string, Key for controlling API usage, generated
from the API Console.
num_retries: Integer, number of times to retry discovery with
randomized exponential backoff in case of intermittent/connection issues.
Returns:
A unicode string representation of the discovery document.
"""
if cache_discovery:
from . import discovery_cache
if cache is None:
cache = discovery_cache.autodetect()
if cache:
content = cache.get(url)
if content:
return content
actual_url = url
# REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
# variable that contains the network address of the client sending the
# request. If it exists then add that to the request for the discovery
# document to avoid exceeding the quota on discovery requests.
if "REMOTE_ADDR" in os.environ:
actual_url = _add_query_parameter(url, "userIp", os.environ["REMOTE_ADDR"])
if developerKey:
actual_url = _add_query_parameter(url, "key", developerKey)
logger.debug("URL being requested: GET %s", actual_url)
    # Execute this request with retries built into HttpRequest
# Note that it will already raise an error if we don't get a 2xx response
req = HttpRequest(http, HttpRequest.null_postproc, actual_url)
resp, content = req.execute(num_retries=num_retries)
try:
content = content.decode("utf-8")
except AttributeError:
pass
try:
service = json.loads(content)
except ValueError as e:
logger.error("Failed to parse as JSON: " + content)
raise InvalidJsonError()
if cache_discovery and cache:
cache.set(url, content)
return content
@positional(1)
def build_from_document(
service,
base=None,
future=None,
http=None,
developerKey=None,
model=None,
requestBuilder=HttpRequest,
credentials=None,
client_options=None,
adc_cert_path=None,
adc_key_path=None,
):
"""Create a Resource for interacting with an API.
Same as `build()`, but constructs the Resource object from a discovery
    document that it is given, as opposed to retrieving one over HTTP.
Args:
service: string or object, the JSON discovery document describing the API.
The value passed in may either be the JSON string or the deserialized
JSON.
base: string, base URI for all HTTP requests, usually the discovery URI.
This parameter is no longer used as rootUrl and servicePath are included
within the discovery document. (deprecated)
future: string, discovery document with future capabilities (deprecated).
http: httplib2.Http, An instance of httplib2.Http or something that acts
    like it, through which HTTP requests will be made.
developerKey: string, Key for controlling API usage, generated
from the API Console.
model: Model class instance that serializes and de-serializes requests and
responses.
requestBuilder: Takes an http request and packages it up to be executed.
credentials: oauth2client.Credentials or
google.auth.credentials.Credentials, credentials to be used for
authentication.
client_options: Mapping object or google.api_core.client_options, client
options to set user options on the client.
(1) The API endpoint should be set through client_options. If API endpoint
is not set, `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable can be used
to control which endpoint to use.
(2) client_cert_source is not supported, client cert should be provided using
client_encrypted_cert_source instead. In order to use the provided client
cert, `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be
set to `true`.
More details on the environment variables are here:
https://google.aip.dev/auth/4114
adc_cert_path: str, client certificate file path to save the application
default client certificate for mTLS. This field is required if you want to
use the default client certificate. `GOOGLE_API_USE_CLIENT_CERTIFICATE`
environment variable must be set to `true` in order to use this field,
    otherwise this field does nothing.
More details on the environment variables are here:
https://google.aip.dev/auth/4114
adc_key_path: str, client encrypted private key file path to save the
application default client encrypted private key for mTLS. This field is
required if you want to use the default client certificate.
`GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable must be set to
    `true` in order to use this field, otherwise this field does nothing.
More details on the environment variables are here:
https://google.aip.dev/auth/4114
Returns:
A Resource object with methods for interacting with the service.
Raises:
google.auth.exceptions.MutualTLSChannelError: if there are any problems
setting up mutual TLS channel.
"""
if client_options is None:
client_options = google.api_core.client_options.ClientOptions()
if isinstance(client_options, six.moves.collections_abc.Mapping):
client_options = google.api_core.client_options.from_dict(client_options)
if http is not None:
# if http is passed, the user cannot provide credentials
banned_options = [
(credentials, "credentials"),
(client_options.credentials_file, "client_options.credentials_file"),
]
for option, name in banned_options:
if option is not None:
raise ValueError("Arguments http and {} are mutually exclusive".format(name))
if isinstance(service, six.string_types):
service = json.loads(service)
elif isinstance(service, six.binary_type):
service = json.loads(service.decode("utf-8"))
if "rootUrl" not in service and isinstance(http, (HttpMock, HttpMockSequence)):
logger.error(
"You are using HttpMock or HttpMockSequence without"
+ "having the service discovery doc in cache. Try calling "
+ "build() without mocking once first to populate the "
+ "cache."
)
raise InvalidJsonError()
# If an API Endpoint is provided on client options, use that as the base URL
base = urljoin(service["rootUrl"], service["servicePath"])
if client_options.api_endpoint:
base = client_options.api_endpoint
schema = Schemas(service)
# If the http client is not specified, then we must construct an http client
    # to make requests. If the service has scopes, then we also need to set up
# authentication.
if http is None:
# Does the service require scopes?
scopes = list(
service.get("auth", {}).get("oauth2", {}).get("scopes", {}).keys()
)
        # If so, then we need to set up authentication if no developerKey is
# specified.
if scopes and not developerKey:
# Make sure the user didn't pass multiple credentials
if client_options.credentials_file and credentials:
raise google.api_core.exceptions.DuplicateCredentialArgs(
"client_options.credentials_file and credentials are mutually exclusive."
)
# Check for credentials file via client options
if client_options.credentials_file:
credentials = _auth.credentials_from_file(
client_options.credentials_file,
scopes=client_options.scopes,
quota_project_id=client_options.quota_project_id,
)
# If the user didn't pass in credentials, attempt to acquire application
# default credentials.
if credentials is None:
credentials = _auth.default_credentials(
scopes=client_options.scopes,
quota_project_id=client_options.quota_project_id,
)
# The credentials need to be scoped.
            # If the user provided scopes via client_options, don't override them.
if not client_options.scopes:
credentials = _auth.with_scopes(credentials, scopes)
# If credentials are provided, create an authorized http instance;
# otherwise, skip authentication.
if credentials:
http = _auth.authorized_http(credentials)
# If the service doesn't require scopes then there is no need for
# authentication.
else:
http = build_http()
# Obtain client cert and create mTLS http channel if cert exists.
client_cert_to_use = None
use_client_cert = os.getenv(GOOGLE_API_USE_CLIENT_CERTIFICATE, "false")
    if use_client_cert not in ("true", "false"):
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_CLIENT_CERTIFICATE value. Accepted values: true, false"
)
if client_options and client_options.client_cert_source:
raise MutualTLSChannelError(
"ClientOptions.client_cert_source is not supported, please use ClientOptions.client_encrypted_cert_source."
)
if use_client_cert == "true":
if (
client_options
and hasattr(client_options, "client_encrypted_cert_source")
and client_options.client_encrypted_cert_source
):
client_cert_to_use = client_options.client_encrypted_cert_source
elif (
adc_cert_path and adc_key_path and mtls.has_default_client_cert_source()
):
client_cert_to_use = mtls.default_client_encrypted_cert_source(
adc_cert_path, adc_key_path
)
if client_cert_to_use:
cert_path, key_path, passphrase = client_cert_to_use()
# The http object we built could be google_auth_httplib2.AuthorizedHttp
# or httplib2.Http. In the first case we need to extract the wrapped
# httplib2.Http object from google_auth_httplib2.AuthorizedHttp.
http_channel = (
http.http
if google_auth_httplib2
and isinstance(http, google_auth_httplib2.AuthorizedHttp)
else http
)
http_channel.add_certificate(key_path, cert_path, "", passphrase)
# If user doesn't provide api endpoint via client options, decide which
# api endpoint to use.
if "mtlsRootUrl" in service and (
not client_options or not client_options.api_endpoint
):
mtls_endpoint = urljoin(service["mtlsRootUrl"], service["servicePath"])
use_mtls_endpoint = os.getenv(GOOGLE_API_USE_MTLS_ENDPOINT, "auto")
        if use_mtls_endpoint not in ("never", "auto", "always"):
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
        # Switch to the mTLS endpoint if the environment variable is "always", or
        # if the environment variable is "auto" and a client cert exists.
if use_mtls_endpoint == "always" or (
use_mtls_endpoint == "auto" and client_cert_to_use
):
base = mtls_endpoint
if model is None:
features = service.get("features", [])
model = JsonModel("dataWrapper" in features)
return Resource(
http=http,
baseUrl=base,
model=model,
developerKey=developerKey,
requestBuilder=requestBuilder,
resourceDesc=service,
rootDesc=service,
schema=schema,
)
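# Example usage (a sketch; assumes you already hold a discovery document as a
# JSON string, e.g. one your application fetched and cached earlier):
#
#   with open("drive_v3_discovery.json") as f:
#       service = build_from_document(f.read())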
def _cast(value, schema_type):
"""Convert value to a string based on JSON Schema type.
See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
JSON Schema.
Args:
value: any, the value to convert
schema_type: string, the type that value should be interpreted as
Returns:
A string representation of 'value' based on the schema_type.
"""
if schema_type == "string":
if type(value) == type("") or type(value) == type(u""):
return value
else:
return str(value)
elif schema_type == "integer":
return str(int(value))
elif schema_type == "number":
return str(float(value))
elif schema_type == "boolean":
return str(bool(value)).lower()
else:
if type(value) == type("") or type(value) == type(u""):
return value
else:
return str(value)
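# Illustrative conversions (hypothetical inputs):
#
#   _cast(10, "string")     # -> "10"
#   _cast("7", "integer")   # -> "7"
#   _cast(True, "boolean")  # -> "true"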
def _media_size_to_long(maxSize):
"""Convert a string media size, such as 10GB or 3TB into an integer.
Args:
maxSize: string, size as a string, such as 2MB or 7GB.
Returns:
The size as an integer value.
"""
if len(maxSize) < 2:
return 0
units = maxSize[-2:].upper()
bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
if bit_shift is not None:
return int(maxSize[:-2]) << bit_shift
else:
return int(maxSize)
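# For example (the "GB" case assumes _MEDIA_SIZE_BIT_SHIFTS maps "GB" to the
# usual power-of-two shift):
#
#   _media_size_to_long("10GB")  # -> 10 << 30
#   _media_size_to_long("50")    # -> 50 (no recognised unit suffix)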
def _media_path_url_from_info(root_desc, path_url):
"""Creates an absolute media path URL.
Constructed using the API root URI and service path from the discovery
document and the relative path for the API method.
Args:
root_desc: Dictionary; the entire original deserialized discovery document.
path_url: String; the relative URL for the API method. Relative to the API
root, which is specified in the discovery document.
Returns:
String; the absolute URI for media upload for the API method.
"""
return "%(root)supload/%(service_path)s%(path)s" % {
"root": root_desc["rootUrl"],
"service_path": root_desc["servicePath"],
"path": path_url,
}
def _fix_up_parameters(method_desc, root_desc, http_method, schema):
"""Updates parameters of an API method with values specific to this library.
Specifically, adds whatever global parameters are specified by the API to the
parameters for the individual method. Also adds parameters which don't
appear in the discovery document, but are available to all discovery based
APIs (these are listed in STACK_QUERY_PARAMETERS).
SIDE EFFECTS: This updates the parameters dictionary object in the method
description.
Args:
method_desc: Dictionary with metadata describing an API method. Value comes
from the dictionary of methods stored in the 'methods' key in the
deserialized discovery document.
root_desc: Dictionary; the entire original deserialized discovery document.
http_method: String; the HTTP method used to call the API method described
in method_desc.
schema: Object, mapping of schema names to schema descriptions.
Returns:
The updated Dictionary stored in the 'parameters' key of the method
description dictionary.
"""
parameters = method_desc.setdefault("parameters", {})
# Add in the parameters common to all methods.
for name, description in six.iteritems(root_desc.get("parameters", {})):
parameters[name] = description
# Add in undocumented query parameters.
for name in STACK_QUERY_PARAMETERS:
parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
# Add 'body' (our own reserved word) to parameters if the method supports
# a request payload.
if http_method in HTTP_PAYLOAD_METHODS and "request" in method_desc:
body = BODY_PARAMETER_DEFAULT_VALUE.copy()
body.update(method_desc["request"])
parameters["body"] = body
return parameters
def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
"""Adds 'media_body' and 'media_mime_type' parameters if supported by method.
SIDE EFFECTS: If there is a 'mediaUpload' in the method description, adds
'media_upload' key to parameters.
Args:
method_desc: Dictionary with metadata describing an API method. Value comes
from the dictionary of methods stored in the 'methods' key in the
deserialized discovery document.
root_desc: Dictionary; the entire original deserialized discovery document.
path_url: String; the relative URL for the API method. Relative to the API
root, which is specified in the discovery document.
parameters: A dictionary describing method parameters for method described
in method_desc.
Returns:
Triple (accept, max_size, media_path_url) where:
- accept is a list of strings representing what content types are
accepted for media upload. Defaults to empty list if not in the
discovery document.
- max_size is a long representing the max size in bytes allowed for a
media upload. Defaults to 0L if not in the discovery document.
- media_path_url is a String; the absolute URI for media upload for the
API method. Constructed using the API root URI and service path from
the discovery document and the relative path for the API method. If
media upload is not supported, this is None.
"""
media_upload = method_desc.get("mediaUpload", {})
accept = media_upload.get("accept", [])
max_size = _media_size_to_long(media_upload.get("maxSize", ""))
media_path_url = None
if media_upload:
media_path_url = _media_path_url_from_info(root_desc, path_url)
parameters["media_body"] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
parameters["media_mime_type"] = MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE.copy()
return accept, max_size, media_path_url
def _fix_up_method_description(method_desc, root_desc, schema):
"""Updates a method description in a discovery document.
SIDE EFFECTS: Changes the parameters dictionary in the method description with
extra parameters which are used locally.
Args:
method_desc: Dictionary with metadata describing an API method. Value comes
from the dictionary of methods stored in the 'methods' key in the
deserialized discovery document.
root_desc: Dictionary; the entire original deserialized discovery document.
schema: Object, mapping of schema names to schema descriptions.
Returns:
Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
where:
- path_url is a String; the relative URL for the API method. Relative to
the API root, which is specified in the discovery document.
- http_method is a String; the HTTP method used to call the API method
described in the method description.
- method_id is a String; the name of the RPC method associated with the
API method, and is in the method description in the 'id' key.
- accept is a list of strings representing what content types are
accepted for media upload. Defaults to empty list if not in the
discovery document.
- max_size is a long representing the max size in bytes allowed for a
media upload. Defaults to 0L if not in the discovery document.
- media_path_url is a String; the absolute URI for media upload for the
API method. Constructed using the API root URI and service path from
the discovery document and the relative path for the API method. If
media upload is not supported, this is None.
"""
path_url = method_desc["path"]
http_method = method_desc["httpMethod"]
method_id = method_desc["id"]
parameters = _fix_up_parameters(method_desc, root_desc, http_method, schema)
# Order is important. `_fix_up_media_upload` needs `method_desc` to have a
# 'parameters' key and needs to know if there is a 'body' parameter because it
# also sets a 'media_body' parameter.
accept, max_size, media_path_url = _fix_up_media_upload(
method_desc, root_desc, path_url, parameters
)
return path_url, http_method, method_id, accept, max_size, media_path_url
def _urljoin(base, url):
"""Custom urljoin replacement supporting : before / in url."""
# In general, it's unsafe to simply join base and url. However, for
# the case of discovery documents, we know:
# * base will never contain params, query, or fragment
# * url will never contain a scheme or net_loc.
# In general, this means we can safely join on /; we just need to
# ensure we end up with precisely one / joining base and url. The
# exception here is the case of media uploads, where url will be an
# absolute url.
if url.startswith("http://") or url.startswith("https://"):
return urljoin(base, url)
new_base = base if base.endswith("/") else base + "/"
new_url = url[1:] if url.startswith("/") else url
return new_base + new_url
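# For example (hypothetical URLs):
#
#   _urljoin("https://www.googleapis.com/drive/v3/", "files")
#   # -> "https://www.googleapis.com/drive/v3/files"
#   _urljoin("https://www.googleapis.com/drive/v3", "/files")
#   # -> "https://www.googleapis.com/drive/v3/files"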
# TODO(dhermes): Convert this class to ResourceMethod and make it callable
class ResourceMethodParameters(object):
"""Represents the parameters associated with a method.
Attributes:
argmap: Map from method parameter name (string) to query parameter name
(string).
required_params: List of required parameters (represented by parameter
name as string).
repeated_params: List of repeated parameters (represented by parameter
name as string).
pattern_params: Map from method parameter name (string) to regular
expression (as a string). If the pattern is set for a parameter, the
value for that parameter must match the regular expression.
query_params: List of parameters (represented by parameter name as string)
that will be used in the query string.
path_params: Set of parameters (represented by parameter name as string)
that will be used in the base URL path.
param_types: Map from method parameter name (string) to parameter type. Type
can be any valid JSON schema type; valid values are 'any', 'array',
'boolean', 'integer', 'number', 'object', or 'string'. Reference:
http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
enum_params: Map from method parameter name (string) to list of strings,
where each list of strings is the list of acceptable enum values.
"""
def __init__(self, method_desc):
"""Constructor for ResourceMethodParameters.
Sets default values and defers to set_parameters to populate.
Args:
method_desc: Dictionary with metadata describing an API method. Value
comes from the dictionary of methods stored in the 'methods' key in
the deserialized discovery document.
"""
self.argmap = {}
self.required_params = []
self.repeated_params = []
self.pattern_params = {}
self.query_params = []
# TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
# parsing is gotten rid of.
self.path_params = set()
self.param_types = {}
self.enum_params = {}
self.set_parameters(method_desc)
def set_parameters(self, method_desc):
"""Populates maps and lists based on method description.
Iterates through each parameter for the method and parses the values from
the parameter dictionary.
Args:
method_desc: Dictionary with metadata describing an API method. Value
comes from the dictionary of methods stored in the 'methods' key in
the deserialized discovery document.
"""
for arg, desc in six.iteritems(method_desc.get("parameters", {})):
param = key2param(arg)
self.argmap[param] = arg
if desc.get("pattern"):
self.pattern_params[param] = desc["pattern"]
if desc.get("enum"):
self.enum_params[param] = desc["enum"]
if desc.get("required"):
self.required_params.append(param)
if desc.get("repeated"):
self.repeated_params.append(param)
if desc.get("location") == "query":
self.query_params.append(param)
if desc.get("location") == "path":
self.path_params.add(param)
self.param_types[param] = desc.get("type", "string")
# TODO(dhermes): Determine if this is still necessary. Discovery based APIs
# should have all path parameters already marked with
# 'location: path'.
for match in URITEMPLATE.finditer(method_desc["path"]):
for namematch in VARNAME.finditer(match.group(0)):
name = key2param(namematch.group(0))
self.path_params.add(name)
if name in self.query_params:
self.query_params.remove(name)
def createMethod(methodName, methodDesc, rootDesc, schema):
"""Creates a method for attaching to a Resource.
Args:
methodName: string, name of the method to use.
methodDesc: object, fragment of deserialized discovery document that
describes the method.
rootDesc: object, the entire deserialized discovery document.
schema: object, mapping of schema names to schema descriptions.
"""
methodName = fix_method_name(methodName)
(
pathUrl,
httpMethod,
methodId,
accept,
maxSize,
mediaPathUrl,
) = _fix_up_method_description(methodDesc, rootDesc, schema)
parameters = ResourceMethodParameters(methodDesc)
def method(self, **kwargs):
        # Don't bother with a docstring; it will be overwritten by createMethod.
for name in six.iterkeys(kwargs):
if name not in parameters.argmap:
raise TypeError('Got an unexpected keyword argument "%s"' % name)
# Remove args that have a value of None.
keys = list(kwargs.keys())
for name in keys:
if kwargs[name] is None:
del kwargs[name]
for name in parameters.required_params:
if name not in kwargs:
# temporary workaround for non-paging methods incorrectly requiring
# page token parameter (cf. drive.changes.watch vs. drive.changes.list)
if name not in _PAGE_TOKEN_NAMES or _findPageTokenName(
_methodProperties(methodDesc, schema, "response")
):
raise TypeError('Missing required parameter "%s"' % name)
for name, regex in six.iteritems(parameters.pattern_params):
if name in kwargs:
if isinstance(kwargs[name], six.string_types):
pvalues = [kwargs[name]]
else:
pvalues = kwargs[name]
for pvalue in pvalues:
if re.match(regex, pvalue) is None:
raise TypeError(
'Parameter "%s" value "%s" does not match the pattern "%s"'
% (name, pvalue, regex)
)
for name, enums in six.iteritems(parameters.enum_params):
if name in kwargs:
# We need to handle the case of a repeated enum
# name differently, since we want to handle both
# arg='value' and arg=['value1', 'value2']
if name in parameters.repeated_params and not isinstance(
kwargs[name], six.string_types
):
values = kwargs[name]
else:
values = [kwargs[name]]
for value in values:
if value not in enums:
raise TypeError(
'Parameter "%s" value "%s" is not an allowed value in "%s"'
% (name, value, str(enums))
)
actual_query_params = {}
actual_path_params = {}
for key, value in six.iteritems(kwargs):
to_type = parameters.param_types.get(key, "string")
# For repeated parameters we cast each member of the list.
if key in parameters.repeated_params and type(value) == type([]):
cast_value = [_cast(x, to_type) for x in value]
else:
cast_value = _cast(value, to_type)
if key in parameters.query_params:
actual_query_params[parameters.argmap[key]] = cast_value
if key in parameters.path_params:
actual_path_params[parameters.argmap[key]] = cast_value
body_value = kwargs.get("body", None)
media_filename = kwargs.get("media_body", None)
media_mime_type = kwargs.get("media_mime_type", None)
if self._developerKey:
actual_query_params["key"] = self._developerKey
model = self._model
if methodName.endswith("_media"):
model = MediaModel()
elif "response" not in methodDesc:
model = RawModel()
headers = {}
headers, params, query, body = model.request(
headers, actual_path_params, actual_query_params, body_value
)
expanded_url = uritemplate.expand(pathUrl, params)
url = _urljoin(self._baseUrl, expanded_url + query)
resumable = None
multipart_boundary = ""
if media_filename:
# Ensure we end up with a valid MediaUpload object.
if isinstance(media_filename, six.string_types):
if media_mime_type is None:
logger.warning(
"media_mime_type argument not specified: trying to auto-detect for %s",
media_filename,
)
media_mime_type, _ = mimetypes.guess_type(media_filename)
if media_mime_type is None:
raise UnknownFileType(media_filename)
if not mimeparse.best_match([media_mime_type], ",".join(accept)):
raise UnacceptableMimeTypeError(media_mime_type)
media_upload = MediaFileUpload(media_filename, mimetype=media_mime_type)
elif isinstance(media_filename, MediaUpload):
media_upload = media_filename
else:
raise TypeError("media_filename must be str or MediaUpload.")
# Check the maxSize
if media_upload.size() is not None and media_upload.size() > maxSize > 0:
raise MediaUploadSizeError("Media larger than: %s" % maxSize)
# Use the media path uri for media uploads
expanded_url = uritemplate.expand(mediaPathUrl, params)
url = _urljoin(self._baseUrl, expanded_url + query)
            if media_upload.resumable():
                url = _add_query_parameter(url, "uploadType", "resumable")
                # This is all we need to do for resumable: if the body exists it gets
                # sent in the first request, otherwise an empty body is sent.
                resumable = media_upload
else:
# A non-resumable upload
if body is None:
# This is a simple media upload
headers["content-type"] = media_upload.mimetype()
body = media_upload.getbytes(0, media_upload.size())
url = _add_query_parameter(url, "uploadType", "media")
else:
# This is a multipart/related upload.
msgRoot = MIMEMultipart("related")
                # msgRoot should not write out its own headers
setattr(msgRoot, "_write_headers", lambda self: None)
# attach the body as one part
msg = MIMENonMultipart(*headers["content-type"].split("/"))
msg.set_payload(body)
msgRoot.attach(msg)
# attach the media as the second part
msg = MIMENonMultipart(*media_upload.mimetype().split("/"))
msg["Content-Transfer-Encoding"] = "binary"
payload = media_upload.getbytes(0, media_upload.size())
msg.set_payload(payload)
msgRoot.attach(msg)
# encode the body: note that we can't use `as_string`, because
# it plays games with `From ` lines.
fp = BytesIO()
g = _BytesGenerator(fp, mangle_from_=False)
g.flatten(msgRoot, unixfrom=False)
body = fp.getvalue()
multipart_boundary = msgRoot.get_boundary()
headers["content-type"] = (
"multipart/related; " 'boundary="%s"'
) % multipart_boundary
url = _add_query_parameter(url, "uploadType", "multipart")
logger.debug("URL being requested: %s %s" % (httpMethod, url))
return self._requestBuilder(
self._http,
model.response,
url,
method=httpMethod,
body=body,
headers=headers,
methodId=methodId,
resumable=resumable,
)
docs = [methodDesc.get("description", DEFAULT_METHOD_DOC), "\n\n"]
if len(parameters.argmap) > 0:
docs.append("Args:\n")
# Skip undocumented params and params common to all methods.
skip_parameters = list(rootDesc.get("parameters", {}).keys())
skip_parameters.extend(STACK_QUERY_PARAMETERS)
all_args = list(parameters.argmap.keys())
args_ordered = [key2param(s) for s in methodDesc.get("parameterOrder", [])]
# Move body to the front of the line.
if "body" in all_args:
args_ordered.append("body")
for name in all_args:
if name not in args_ordered:
args_ordered.append(name)
for arg in args_ordered:
if arg in skip_parameters:
continue
repeated = ""
if arg in parameters.repeated_params:
repeated = " (repeated)"
required = ""
if arg in parameters.required_params:
required = " (required)"
paramdesc = methodDesc["parameters"][parameters.argmap[arg]]
paramdoc = paramdesc.get("description", "A parameter")
if "$ref" in paramdesc:
docs.append(
(" %s: object, %s%s%s\n The object takes the" " form of:\n\n%s\n\n")
% (
arg,
paramdoc,
required,
repeated,
schema.prettyPrintByName(paramdesc["$ref"]),
)
)
else:
paramtype = paramdesc.get("type", "string")
docs.append(
" %s: %s, %s%s%s\n" % (arg, paramtype, paramdoc, required, repeated)
)
enum = paramdesc.get("enum", [])
enumDesc = paramdesc.get("enumDescriptions", [])
if enum and enumDesc:
docs.append(" Allowed values\n")
for (name, desc) in zip(enum, enumDesc):
docs.append(" %s - %s\n" % (name, desc))
if "response" in methodDesc:
if methodName.endswith("_media"):
docs.append("\nReturns:\n The media object as a string.\n\n ")
else:
docs.append("\nReturns:\n An object of the form:\n\n ")
docs.append(schema.prettyPrintSchema(methodDesc["response"]))
setattr(method, "__doc__", "".join(docs))
return (methodName, method)
def createNextMethod(
methodName,
pageTokenName="pageToken",
nextPageTokenName="nextPageToken",
isPageTokenParameter=True,
):
"""Creates any _next methods for attaching to a Resource.
The _next methods allow for easy iteration through list() responses.
Args:
methodName: string, name of the method to use.
pageTokenName: string, name of request page token field.
nextPageTokenName: string, name of response page token field.
isPageTokenParameter: Boolean, True if request page token is a query
parameter, False if request page token is a field of the request body.
"""
methodName = fix_method_name(methodName)
def methodNext(self, previous_request, previous_response):
"""Retrieves the next page of results.
Args:
previous_request: The request for the previous page. (required)
previous_response: The response from the request for the previous page. (required)
Returns:
A request object that you can call 'execute()' on to request the next
page. Returns None if there are no more items in the collection.
"""
# Retrieve nextPageToken from previous_response
# Use as pageToken in previous_request to create new request.
nextPageToken = previous_response.get(nextPageTokenName, None)
if not nextPageToken:
return None
request = copy.copy(previous_request)
if isPageTokenParameter:
# Replace pageToken value in URI
request.uri = _add_query_parameter(
request.uri, pageTokenName, nextPageToken
)
logger.debug("Next page request URL: %s %s" % (methodName, request.uri))
else:
# Replace pageToken value in request body
model = self._model
body = model.deserialize(request.body)
body[pageTokenName] = nextPageToken
request.body = model.serialize(body)
logger.debug("Next page request body: %s %s" % (methodName, body))
return request
return (methodName, methodNext)
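# Typical pagination loop built on the generated *_next methods (a sketch;
# "files", its list method, and process() are illustrative and depend on the
# API in use):
#
#   request = service.files().list(pageSize=100)
#   while request is not None:
#       response = request.execute()
#       process(response)  # hypothetical per-page callback
#       request = service.files().list_next(request, response)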
class Resource(object):
"""A class for interacting with a resource."""
def __init__(
self,
http,
baseUrl,
model,
requestBuilder,
developerKey,
resourceDesc,
rootDesc,
schema,
):
"""Build a Resource from the API description.
Args:
http: httplib2.Http, Object to make http requests with.
baseUrl: string, base URL for the API. All requests are relative to this
URI.
model: googleapiclient.Model, converts to and from the wire format.
requestBuilder: class or callable that instantiates an
googleapiclient.HttpRequest object.
developerKey: string, key obtained from
https://code.google.com/apis/console
resourceDesc: object, section of deserialized discovery document that
describes a resource. Note that the top level discovery document
is considered a resource.
rootDesc: object, the entire deserialized discovery document.
schema: object, mapping of schema names to schema descriptions.
"""
self._dynamic_attrs = []
self._http = http
self._baseUrl = baseUrl
self._model = model
self._developerKey = developerKey
self._requestBuilder = requestBuilder
self._resourceDesc = resourceDesc
self._rootDesc = rootDesc
self._schema = schema
self._set_service_methods()
def _set_dynamic_attr(self, attr_name, value):
"""Sets an instance attribute and tracks it in a list of dynamic attributes.
Args:
attr_name: string; The name of the attribute to be set
value: The value being set on the object and tracked in the dynamic cache.
"""
self._dynamic_attrs.append(attr_name)
self.__dict__[attr_name] = value
def __getstate__(self):
"""Trim the state down to something that can be pickled.
Uses the fact that the instance variable _dynamic_attrs holds attrs that
will be wiped and restored on pickle serialization.
"""
state_dict = copy.copy(self.__dict__)
for dynamic_attr in self._dynamic_attrs:
del state_dict[dynamic_attr]
del state_dict["_dynamic_attrs"]
return state_dict
def __setstate__(self, state):
"""Reconstitute the state of the object from being pickled.
Uses the fact that the instance variable _dynamic_attrs holds attrs that
will be wiped and restored on pickle serialization.
"""
self.__dict__.update(state)
self._dynamic_attrs = []
self._set_service_methods()
def __enter__(self):
return self
def __exit__(self, exc_type, exc, exc_tb):
self.close()
def close(self):
"""Close httplib2 connections."""
# httplib2 leaves sockets open by default.
# Cleanup using the `close` method.
# https://github.com/httplib2/httplib2/issues/148
self._http.http.close()
def _set_service_methods(self):
self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
self._add_next_methods(self._resourceDesc, self._schema)
def _add_basic_methods(self, resourceDesc, rootDesc, schema):
# If this is the root Resource, add a new_batch_http_request() method.
if resourceDesc == rootDesc:
batch_uri = "%s%s" % (
rootDesc["rootUrl"],
rootDesc.get("batchPath", "batch"),
)
def new_batch_http_request(callback=None):
"""Create a BatchHttpRequest object based on the discovery document.
Args:
callback: callable, A callback to be called for each response, of the
form callback(id, response, exception). The first parameter is the
request id, and the second is the deserialized response object. The
third is an apiclient.errors.HttpError exception object if an HTTP
error occurred while processing the request, or None if no error
occurred.
Returns:
A BatchHttpRequest object based on the discovery document.
"""
return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
self._set_dynamic_attr("new_batch_http_request", new_batch_http_request)
# Add basic methods to Resource
if "methods" in resourceDesc:
for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
fixedMethodName, method = createMethod(
methodName, methodDesc, rootDesc, schema
)
self._set_dynamic_attr(
fixedMethodName, method.__get__(self, self.__class__)
)
# Add in _media methods. The functionality of the attached method will
# change when it sees that the method name ends in _media.
if methodDesc.get("supportsMediaDownload", False):
fixedMethodName, method = createMethod(
methodName + "_media", methodDesc, rootDesc, schema
)
self._set_dynamic_attr(
fixedMethodName, method.__get__(self, self.__class__)
)
def _add_nested_resources(self, resourceDesc, rootDesc, schema):
# Add in nested resources
if "resources" in resourceDesc:
def createResourceMethod(methodName, methodDesc):
"""Create a method on the Resource to access a nested Resource.
Args:
methodName: string, name of the method to use.
methodDesc: object, fragment of deserialized discovery document that
describes the method.
"""
methodName = fix_method_name(methodName)
def methodResource(self):
return Resource(
http=self._http,
baseUrl=self._baseUrl,
model=self._model,
developerKey=self._developerKey,
requestBuilder=self._requestBuilder,
resourceDesc=methodDesc,
rootDesc=rootDesc,
schema=schema,
)
setattr(methodResource, "__doc__", "A collection resource.")
setattr(methodResource, "__is_resource__", True)
return (methodName, methodResource)
for methodName, methodDesc in six.iteritems(resourceDesc["resources"]):
fixedMethodName, method = createResourceMethod(methodName, methodDesc)
self._set_dynamic_attr(
fixedMethodName, method.__get__(self, self.__class__)
)
def _add_next_methods(self, resourceDesc, schema):
# Add _next() methods if and only if one of the names 'pageToken' or
        # 'nextPageToken' occurs among the fields of both the method's response
        # type and either the method's request (query parameters) or its request
        # body.
if "methods" not in resourceDesc:
return
for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
nextPageTokenName = _findPageTokenName(
_methodProperties(methodDesc, schema, "response")
)
if not nextPageTokenName:
continue
isPageTokenParameter = True
pageTokenName = _findPageTokenName(methodDesc.get("parameters", {}))
if not pageTokenName:
isPageTokenParameter = False
pageTokenName = _findPageTokenName(
_methodProperties(methodDesc, schema, "request")
)
if not pageTokenName:
continue
fixedMethodName, method = createNextMethod(
methodName + "_next",
pageTokenName,
nextPageTokenName,
isPageTokenParameter,
)
self._set_dynamic_attr(
fixedMethodName, method.__get__(self, self.__class__)
)
def _findPageTokenName(fields):
"""Search field names for one like a page token.
Args:
fields: container of string, names of fields.
Returns:
First name that is either 'pageToken' or 'nextPageToken' if one exists,
otherwise None.
"""
return next(
(tokenName for tokenName in _PAGE_TOKEN_NAMES if tokenName in fields), None
)
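# For example (hypothetical field sets, assuming _PAGE_TOKEN_NAMES contains
# 'pageToken' and 'nextPageToken'):
#
#   _findPageTokenName({"pageToken": {}, "q": {}})  # -> "pageToken"
#   _findPageTokenName({"q": {}})                   # -> None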
def _methodProperties(methodDesc, schema, name):
"""Get properties of a field in a method description.
Args:
methodDesc: object, fragment of deserialized discovery document that
describes the method.
schema: object, mapping of schema names to schema descriptions.
name: string, name of top-level field in method description.
Returns:
Object representing fragment of deserialized discovery document
corresponding to 'properties' field of object corresponding to named field
in method description, if it exists, otherwise empty dict.
"""
desc = methodDesc.get(name, {})
if "$ref" in desc:
desc = schema.get(desc["$ref"], {})
return desc.get("properties", {})
| [
"[email protected]"
] | |
c49cb629b81dd8bab875ff2f9d3dbd0a5ce2d44e | fea2eff6ed6ff05879e071d52d978b1f2f322f31 | /TensorFlow深度学习应用实践_源代码/08/8-1.py | e433dd823a9c612a94b2b30fa227c819242e8df1 | [] | no_license | GetMyPower/mypython | 71ec8db85c82e33b893c5d53ac64a007951fd8f0 | 1846148e327e7d14ebb96c9fea4b47aa61762a69 | refs/heads/master | 2022-03-22T08:11:56.113905 | 2019-12-20T15:00:23 | 2019-12-20T15:00:23 | 198,230,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | import tensorflow as tf
input1 = tf.constant(1)
print(input1)
input2 = tf.Variable(2,tf.int32)
print(input2)
input2 = input1
sess = tf.Session()
print(sess.run(input2))
| [
"[email protected]"
] | |
5e0b811355d69d98aa4f260aafc4dacef13b4dd2 | 95fb4b8e51dd7a852d9ed9de067c1e929a71404a | /colour/models/rgb/datasets/sony.py | ed1b14ee7367d8cae14cb322f5cd81df68be3c15 | [
"BSD-3-Clause"
] | permissive | zachlewis/colour | e9b1f6978dae157306536492a652b2fb9588894a | c248e2913d6c62658e4892e5bc8503d86ed5d9ab | refs/heads/develop | 2021-04-25T22:08:34.617007 | 2020-11-29T23:18:48 | 2020-11-29T23:18:48 | 109,490,837 | 0 | 0 | BSD-3-Clause | 2019-10-28T19:44:56 | 2017-11-04T11:53:24 | Python | UTF-8 | Python | false | false | 11,336 | py | # -*- coding: utf-8 -*-
"""
Sony Colourspaces
=================
Defines the *Sony* colourspaces:
- :attr:`colour.models.RGB_COLOURSPACE_S_GAMUT`.
- :attr:`colour.models.RGB_COLOURSPACE_S_GAMUT3`.
- :attr:`colour.models.RGB_COLOURSPACE_S_GAMUT3_CINE`.
- :attr:`colour.models.RGB_COLOURSPACE_VENICE_S_GAMUT3`.
- :attr:`colour.models.RGB_COLOURSPACE_VENICE_S_GAMUT3_CINE`.
Notes
-----
- The *Venice S-Gamut3* and *Venice S-Gamut3.Cine* primaries and whitepoint
were derived with the following `Google Colab Notebook \
<https://colab.research.google.com/drive/1ZGTij7jT8eZRMPUkyWlv_x5ix5Q5twMB>`__.
References
----------
- :cite:`Gaggioni` : Gaggioni, H., Dhanendra, P., Yamashita, J., Kawada, N.,
Endo, K., & Clark, C. (n.d.). S-Log: A new LUT for digital production
mastering and interchange applications (Vol. 709, pp. 1-13).
http://pro.sony.com/bbsccms/assets/files/mkt/cinema/solutions/slog_manual.pdf
- :cite:`SonyCorporation` : Sony Corporation. (n.d.). S-Log Whitepaper (pp.
1-17). http://www.theodoropoulos.info/attachments/076_on%20S-Log.pdf
- :cite:`SonyCorporationd` : Sony Corporation. (n.d.). Technical Summary
for S-Gamut3.Cine/S-Log3 and S-Gamut3/S-Log3 (pp. 1-7).
http://community.sony.com/sony/attachments/sony/\
large-sensor-camera-F5-F55/12359/2/\
TechnicalSummary_for_S-Gamut3Cine_S-Gamut3_S-Log3_V1_00.pdf
- :cite:`SonyCorporatione` : Sony Corporation. (n.d.).
S-Gamut3_S-Gamut3Cine_Matrix.xlsx.
https://community.sony.com/sony/attachments/sony/\
large-sensor-camera-F5-F55/12359/3/S-Gamut3_S-Gamut3Cine_Matrix.xlsx
- :cite:`SonyElectronicsCorporation2020` : Sony Electronics Corporation.
(2020). IDT.Sony.Venice_SLog3_SGamut3.ctl. https://github.com/ampas/\
aces-dev/blob/710ecbe52c87ce9f4a1e02c8ddf7ea0d6b611cc8/transforms/ctl/idt/\
vendorSupplied/sony/IDT.Sony.Venice_SLog3_SGamut3.ctl
- :cite:`SonyElectronicsCorporation2020a` : Sony Electronics Corporation.
(2020). IDT.Sony.Venice_SLog3_SGamut3Cine.ctl. https://github.com/ampas/\
aces-dev/blob/710ecbe52c87ce9f4a1e02c8ddf7ea0d6b611cc8/transforms/ctl/idt/\
vendorSupplied/sony/IDT.Sony.Venice_SLog3_SGamut3Cine.ctl
"""
from __future__ import division, unicode_literals
import numpy as np
from colour.colorimetry import CCS_ILLUMINANTS
from colour.models.rgb import (RGB_Colourspace, log_encoding_SLog2,
log_decoding_SLog2, log_encoding_SLog3,
log_decoding_SLog3, normalised_primary_matrix)
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2020 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = '[email protected]'
__status__ = 'Production'
__all__ = [
'PRIMARIES_S_GAMUT', 'WHITEPOINT_NAME_S_GAMUT', 'CCS_WHITEPOINT_S_GAMUT',
'MATRIX_S_GAMUT_TO_XYZ', 'MATRIX_XYZ_TO_S_GAMUT',
'RGB_COLOURSPACE_S_GAMUT', 'PRIMARIES_S_GAMUT3',
'WHITEPOINT_NAME_S_GAMUT3', 'CCS_WHITEPOINT_S_GAMUT3',
'MATRIX_S_GAMUT3_TO_XYZ', 'MATRIX_XYZ_TO_S_GAMUT3',
'RGB_COLOURSPACE_S_GAMUT3', 'PRIMARIES_S_GAMUT3_CINE',
'WHITEPOINT_NAME_S_GAMUT3_CINE', 'CCS_WHITEPOINT_S_GAMUT3_CINE',
'MATRIX_S_GAMUT3_CINE_TO_XYZ', 'MATRIX_XYZ_TO_S_GAMUT3_CINE',
'RGB_COLOURSPACE_S_GAMUT3_CINE', 'PRIMARIES_VENICE_S_GAMUT3',
'WHITEPOINT_NAME_VENICE_S_GAMUT3', 'CCS_WHITEPOINT_VENICE_S_GAMUT3',
'MATRIX_VENICE_S_GAMUT3_TO_XYZ', 'MATRIX_XYZ_TO_VENICE_S_GAMUT3',
'RGB_COLOURSPACE_VENICE_S_GAMUT3', 'PRIMARIES_VENICE_S_GAMUT3_CINE',
'WHITEPOINT_NAME_VENICE_S_GAMUT3_CINE',
'CCS_WHITEPOINT_VENICE_S_GAMUT3_CINE',
'MATRIX_VENICE_S_GAMUT3_CINE_TO_XYZ', 'MATRIX_XYZ_TO_VENICE_S_GAMUT3_CINE',
'RGB_COLOURSPACE_VENICE_S_GAMUT3_CINE'
]
PRIMARIES_S_GAMUT = np.array([
[0.7300, 0.2800],
[0.1400, 0.8550],
[0.1000, -0.0500],
])
"""
*S-Gamut* colourspace primaries.
PRIMARIES_S_GAMUT : ndarray, (3, 2)
"""
WHITEPOINT_NAME_S_GAMUT = 'D65'
"""
*S-Gamut* colourspace whitepoint name.
WHITEPOINT_NAME_S_GAMUT : unicode
"""
CCS_WHITEPOINT_S_GAMUT = (CCS_ILLUMINANTS[
'CIE 1931 2 Degree Standard Observer'][WHITEPOINT_NAME_S_GAMUT])
"""
*S-Gamut* colourspace whitepoint chromaticity coordinates.
CCS_WHITEPOINT_S_GAMUT : ndarray
"""
MATRIX_S_GAMUT_TO_XYZ = np.array([
[0.7064827132, 0.1288010498, 0.1151721641],
[0.2709796708, 0.7866064112, -0.0575860820],
[-0.0096778454, 0.0046000375, 1.0941355587],
])
"""
*S-Gamut* colourspace to *CIE XYZ* tristimulus values matrix.
MATRIX_S_GAMUT_TO_XYZ : array_like, (3, 3)
"""
MATRIX_XYZ_TO_S_GAMUT = np.array([
[1.5073998991, -0.2458221374, -0.1716116808],
[-0.5181517271, 1.3553912409, 0.1258786682],
[0.0155116982, -0.0078727714, 0.9119163656],
])
"""
*CIE XYZ* tristimulus values to *S-Gamut* colourspace matrix.
MATRIX_XYZ_TO_S_GAMUT : array_like, (3, 3)
"""
RGB_COLOURSPACE_S_GAMUT = RGB_Colourspace(
'S-Gamut',
PRIMARIES_S_GAMUT,
CCS_WHITEPOINT_S_GAMUT,
WHITEPOINT_NAME_S_GAMUT,
MATRIX_S_GAMUT_TO_XYZ,
MATRIX_XYZ_TO_S_GAMUT,
log_encoding_SLog2,
log_decoding_SLog2,
)
RGB_COLOURSPACE_S_GAMUT.__doc__ = """
*S-Gamut* colourspace.
References
----------
:cite:`Gaggioni`, :cite:`SonyCorporation`
RGB_COLOURSPACE_S_GAMUT : RGB_Colourspace
"""
PRIMARIES_S_GAMUT3 = PRIMARIES_S_GAMUT
"""
*S-Gamut3* colourspace primaries.
PRIMARIES_S_GAMUT3 : ndarray, (3, 2)
"""
WHITEPOINT_NAME_S_GAMUT3 = WHITEPOINT_NAME_S_GAMUT
"""
*S-Gamut3* colourspace whitepoint name.
WHITEPOINT_NAME_S_GAMUT3 : unicode
"""
CCS_WHITEPOINT_S_GAMUT3 = CCS_WHITEPOINT_S_GAMUT
"""
*S-Gamut3* colourspace whitepoint chromaticity coordinates.
CCS_WHITEPOINT_S_GAMUT3 : ndarray
"""
MATRIX_S_GAMUT3_TO_XYZ = MATRIX_S_GAMUT_TO_XYZ
"""
*S-Gamut3* colourspace to *CIE XYZ* tristimulus values matrix.
MATRIX_S_GAMUT3_TO_XYZ : array_like, (3, 3)
"""
MATRIX_XYZ_TO_S_GAMUT3 = MATRIX_XYZ_TO_S_GAMUT
"""
*CIE XYZ* tristimulus values to *S-Gamut3* colourspace matrix.
MATRIX_XYZ_TO_S_GAMUT3 : array_like, (3, 3)
"""
RGB_COLOURSPACE_S_GAMUT3 = RGB_Colourspace(
'S-Gamut3',
PRIMARIES_S_GAMUT3,
CCS_WHITEPOINT_S_GAMUT3,
WHITEPOINT_NAME_S_GAMUT3,
MATRIX_S_GAMUT3_TO_XYZ,
MATRIX_XYZ_TO_S_GAMUT3,
log_encoding_SLog3,
log_decoding_SLog3,
)
RGB_COLOURSPACE_S_GAMUT3.__doc__ = """
*S-Gamut3* colourspace.
References
----------
:cite:`SonyCorporationd`
RGB_COLOURSPACE_S_GAMUT3 : RGB_Colourspace
"""
PRIMARIES_S_GAMUT3_CINE = np.array([
[0.76600, 0.27500],
[0.22500, 0.80000],
[0.08900, -0.08700],
])
"""
*S-Gamut3.Cine* colourspace primaries.
PRIMARIES_S_GAMUT3_CINE : ndarray, (3, 2)
"""
WHITEPOINT_NAME_S_GAMUT3_CINE = WHITEPOINT_NAME_S_GAMUT
"""
*S-Gamut3.Cine* colourspace whitepoint name.
WHITEPOINT_NAME_S_GAMUT3_CINE : unicode
"""
CCS_WHITEPOINT_S_GAMUT3_CINE = CCS_WHITEPOINT_S_GAMUT
"""
*S-Gamut3.Cine* colourspace whitepoint chromaticity coordinates.
CCS_WHITEPOINT_S_GAMUT3_CINE : ndarray
"""
MATRIX_S_GAMUT3_CINE_TO_XYZ = np.array([
[0.5990839208, 0.2489255161, 0.1024464902],
[0.2150758201, 0.8850685017, -0.1001443219],
[-0.0320658495, -0.0276583907, 1.1487819910],
])
"""
*S-Gamut3.Cine* colourspace to *CIE XYZ* tristimulus values matrix.
MATRIX_S_GAMUT3_CINE_TO_XYZ : array_like, (3, 3)
"""
MATRIX_XYZ_TO_S_GAMUT3_CINE = np.array([
[1.8467789693, -0.5259861230, -0.2105452114],
[-0.4441532629, 1.2594429028, 0.1493999729],
[0.0408554212, 0.0156408893, 0.8682072487],
])
"""
*CIE XYZ* tristimulus values to *S-Gamut3.Cine* colourspace matrix.
MATRIX_XYZ_TO_S_GAMUT3_CINE : array_like, (3, 3)
"""
RGB_COLOURSPACE_S_GAMUT3_CINE = RGB_Colourspace(
'S-Gamut3.Cine',
PRIMARIES_S_GAMUT3_CINE,
CCS_WHITEPOINT_S_GAMUT3_CINE,
WHITEPOINT_NAME_S_GAMUT3_CINE,
MATRIX_S_GAMUT3_CINE_TO_XYZ,
MATRIX_XYZ_TO_S_GAMUT3_CINE,
log_encoding_SLog3,
log_decoding_SLog3,
)
RGB_COLOURSPACE_S_GAMUT3_CINE.__doc__ = """
*S-Gamut3.Cine* colourspace.
References
----------
:cite:`SonyCorporatione`
RGB_COLOURSPACE_S_GAMUT3_CINE : RGB_Colourspace
"""
PRIMARIES_VENICE_S_GAMUT3 = np.array([
[0.740464264304292, 0.279364374750660],
[0.089241145423286, 0.893809528608105],
[0.110488236673827, -0.052579333080476],
])
"""
*Venice S-Gamut3* colourspace primaries.
PRIMARIES_VENICE_S_GAMUT3 : ndarray, (3, 2)
"""
WHITEPOINT_NAME_VENICE_S_GAMUT3 = WHITEPOINT_NAME_S_GAMUT
"""
*Venice S-Gamut3* colourspace whitepoint name.
WHITEPOINT_NAME_VENICE_S_GAMUT3 : unicode
"""
CCS_WHITEPOINT_VENICE_S_GAMUT3 = CCS_WHITEPOINT_S_GAMUT
"""
*Venice S-Gamut3* colourspace whitepoint chromaticity coordinates.
CCS_WHITEPOINT_VENICE_S_GAMUT3 : ndarray
"""
MATRIX_VENICE_S_GAMUT3_TO_XYZ = normalised_primary_matrix(
PRIMARIES_VENICE_S_GAMUT3, CCS_WHITEPOINT_VENICE_S_GAMUT3)
"""
*Venice S-Gamut3* colourspace to *CIE XYZ* tristimulus values matrix.
MATRIX_VENICE_S_GAMUT3_TO_XYZ : array_like, (3, 3)
"""
MATRIX_XYZ_TO_VENICE_S_GAMUT3 = np.linalg.inv(MATRIX_VENICE_S_GAMUT3_TO_XYZ)
"""
*CIE XYZ* tristimulus values to *Venice S-Gamut3* colourspace matrix.
MATRIX_XYZ_TO_VENICE_S_GAMUT3 : array_like, (3, 3)
"""
RGB_COLOURSPACE_VENICE_S_GAMUT3 = RGB_Colourspace(
'Venice S-Gamut3',
PRIMARIES_VENICE_S_GAMUT3,
CCS_WHITEPOINT_VENICE_S_GAMUT3,
WHITEPOINT_NAME_VENICE_S_GAMUT3,
MATRIX_VENICE_S_GAMUT3_TO_XYZ,
MATRIX_XYZ_TO_VENICE_S_GAMUT3,
log_encoding_SLog3,
log_decoding_SLog3,
)
RGB_COLOURSPACE_VENICE_S_GAMUT3.__doc__ = """
*Venice S-Gamut3* colourspace.
References
----------
:cite:`SonyElectronicsCorporation2020`
RGB_COLOURSPACE_VENICE_S_GAMUT3 : RGB_Colourspace
"""
PRIMARIES_VENICE_S_GAMUT3_CINE = np.array([
[0.775901871567345, 0.274502392854799],
[0.188682902773355, 0.828684937020288],
[0.101337382499301, -0.089187517306263],
])
"""
*Venice S-Gamut3.Cine* colourspace primaries.
PRIMARIES_VENICE_S_GAMUT3_CINE : ndarray, (3, 2)
"""
WHITEPOINT_NAME_VENICE_S_GAMUT3_CINE = WHITEPOINT_NAME_S_GAMUT
"""
*Venice S-Gamut3.Cine* colourspace whitepoint name.
WHITEPOINT_NAME_VENICE_S_GAMUT3_CINE : unicode
"""
CCS_WHITEPOINT_VENICE_S_GAMUT3_CINE = CCS_WHITEPOINT_S_GAMUT
"""
*Venice S-Gamut3.Cine* colourspace whitepoint chromaticity coordinates.
CCS_WHITEPOINT_VENICE_S_GAMUT3_CINE : ndarray
"""
MATRIX_VENICE_S_GAMUT3_CINE_TO_XYZ = normalised_primary_matrix(
PRIMARIES_VENICE_S_GAMUT3_CINE, CCS_WHITEPOINT_VENICE_S_GAMUT3_CINE)
"""
*Venice S-Gamut3.Cine* colourspace to *CIE XYZ* tristimulus values matrix.
MATRIX_VENICE_S_GAMUT3_CINE_TO_XYZ : array_like, (3, 3)
"""
MATRIX_XYZ_TO_VENICE_S_GAMUT3_CINE = np.linalg.inv(
MATRIX_VENICE_S_GAMUT3_CINE_TO_XYZ)
"""
*CIE XYZ* tristimulus values to *Venice S-Gamut3.Cine* colourspace matrix.
MATRIX_XYZ_TO_VENICE_S_GAMUT3_CINE : array_like, (3, 3)
"""
RGB_COLOURSPACE_VENICE_S_GAMUT3_CINE = RGB_Colourspace(
'Venice S-Gamut3.Cine',
PRIMARIES_VENICE_S_GAMUT3_CINE,
CCS_WHITEPOINT_VENICE_S_GAMUT3_CINE,
WHITEPOINT_NAME_VENICE_S_GAMUT3_CINE,
MATRIX_VENICE_S_GAMUT3_CINE_TO_XYZ,
MATRIX_XYZ_TO_VENICE_S_GAMUT3_CINE,
log_encoding_SLog3,
log_decoding_SLog3,
)
RGB_COLOURSPACE_VENICE_S_GAMUT3_CINE.__doc__ = """
*Venice S-Gamut3.Cine* colourspace.
References
----------
:cite:`SonyElectronicsCorporation2020a`
RGB_COLOURSPACE_VENICE_S_GAMUT3_CINE : RGB_Colourspace
"""
| [
"[email protected]"
] | |
27791dff47ce4d430b69660ad95df2783f3233fd | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/coverage-big-3078.py | 47f6110342d0bc97ffe7055c5477a7037e7b6e28 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,339 | py | count:int = 0
count2:int = 0
count3:int = 0
count4:int = 0
count5:int = 0
def foo(s: str) -> int:
return len(s)
def foo2(s: str, s2: str) -> int:
return len(s)
def foo3(s: str, s2: str, s3: str) -> int:
return len(s)
def foo4(s: str, s2: str, s3: str, s4: str) -> int:
return len(s)
def foo5(s: str, s2: str, s3: str, s4: str, s5: str) -> int:
return len(s)
class bar(object):
p: bool = True
def baz(self:"bar", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar2(object):
p: bool = True
p2: bool = True
def baz(self:"bar2", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar2", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar3(object):
p: bool = True
p2: bool = True
p3: bool = True
def baz(self:"bar3", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar3", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar3", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar4(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
def baz(self:"bar4", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar4", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar4", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar4", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
            self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
class bar5(object):
p: bool = True
p2: bool = True
p3: bool = True
p4: bool = True
p5: bool = True
def baz(self:"bar5", xx: [int]) -> str:
global count
x:int = 0
y:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz2(self:"bar5", xx: [int], xx2: [int]) -> str:
global count
x:int = 0
x2:int = 0
y:int = 1
y2:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz3(self:"bar5", xx: [int], xx2: [int], xx3: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
y:int = 1
y2:int = 1
y3:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz4(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
def baz5(self:"bar5", xx: [int], xx2: [int], xx3: [int], xx4: [int], xx5: [int]) -> str:
global count
x:int = 0
x2:int = 0
x3:int = 0
x4:int = 0
x5:int = 0
y:int = 1
y2:int = 1
y3:int = 1
y4:int = 1
y5:int = 1
def qux(y: int) -> object:
nonlocal x
if x > y:
x = -1
def qux2(y: int, y2: int) -> object:
nonlocal x
nonlocal x2
if x > y:
x = -1
def qux3(y: int, y2: int, y3: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
if x > y:
x = -1
def qux4(y: int, y2: int, y3: int, y4: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
if x > y:
x = -1
def qux5(y: int, y2: int, y3: int, y4: int, y5: int) -> object:
nonlocal x
nonlocal x2
nonlocal x3
nonlocal x4
nonlocal x5
if x > y:
x = -1
for x in xx:
self.p = x == 2
qux(0) # Yay! ChocoPy
count = count + 1
while x <= 0:
if self.p:
xx[0] = xx[1]
self.p = not self.p
x = x + 1
elif foo("Long"[0]) == 1:
self.p = self is None
return "Nope"
print(bar().baz([1,2]))
| [
"[email protected]"
] | |
8e0cd4727a216f881c84d55625a70efbdcadb46d | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_287/ch150_2020_04_13_20_45_30_391205.py | d44700ac9eb31e56af69bcc4d9db551fc97ab291 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | import math
def calcula_pi(n):
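    # Basel series: pi^2/6 = sum_{k>=1} 1/k^2, so pi ~ sqrt(6 * sum_{k=1}^{n-1} 1/k^2)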
if n == 1:
p = (6**(1/2))
return p
p = 0
valores = list(range(n))
valores.remove(0)
for a in (valores):
p += (6/(a**2))
p = (p**(1/2))
return p | [
"[email protected]"
] | |
d78a7f97e2bbf295b699f32b08fc0480aa10688a | 67ae1b00411ad63726e0abb07ba82ac5b75fc32a | /findmusician/wsgi.py | e3e754c495255bd2fa5792053e4e437c249d3059 | [] | no_license | SimonKorzonek/findmusician | e40429bf45115de0709ef6fe92ace3c5cd195660 | fc23e0d6b5da7d98423accef5eb82b9b6c5516bc | refs/heads/main | 2023-02-15T10:12:02.070458 | 2021-01-05T23:02:05 | 2021-01-05T23:02:05 | 327,074,301 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
WSGI config for findmusician project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'findmusician.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
c4463b466523f98a0389beff01c3891c2fefadb3 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/leetcode/LeetcodePythonProject_with_solution/leetcode_0751_0800/LeetCode792_NumberOfMatchingSubsequences.py | ce718c54dd28669b15ae5ae32138582fbd1dc330 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,018 | py | '''
Created on Apr 16, 2018
@author: tongq
'''
class Solution(object):
    def numMatchingSubseq(self, S, words):
        """
        :type S: str
        :type words: List[str]
        :rtype: int
        """
        hashmap = {}
        for i in range(26):
            c = chr(ord('a')+i)
            hashmap[c] = []
        for word in words:
            hashmap[word[0]].append(word)
        count = 0
        for c in S:
            bucket = hashmap[c]  # pending suffixes whose next needed char is c
            size = len(bucket)
            for i in range(size):
                word = bucket.pop(0)
                if len(word) == 1:
                    count += 1
                else:
                    hashmap[word[1]].append(word[1:])
        return count
    def test(self):
        testCases = [
            [
                'abcde',
                ["a", "bb", "acd", "ace"],
            ],
        ]
        for s, words in testCases:
            result = self.numMatchingSubseq(s, words)
            print('result: %s' % result)
            print('-='*30+'-')
if __name__ == '__main__':
    Solution().test()
| [
"[email protected]"
] | |
f8e7c4835096c2301aac6f202b1a28fee2bab730 | 4c984a318ccf26e765f902669399da66497e194d | /pollexe/urls.py | 5ed934d9e6f6c6c24682d62a19f5786bdf6c0416 | [] | no_license | sajalmia381/pollexe | 914af663bad6becb4308c738a16240028f37f99b | 3ead47fee43855aba1ee0f4c2b3f222cac6a9a68 | refs/heads/master | 2020-04-21T12:42:49.283843 | 2019-02-07T13:43:40 | 2019-02-07T13:43:40 | 169,572,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('', include('account.urls', namespace='account')),
path('', include('page.urls', namespace='page')),
path('', include('blog.urls', namespace='blog')),
path('', include('product.urls', namespace='product')),
# admin
path('admin/', admin.site.urls),
# Third party
path('summernote/', include('django_summernote.urls')),
path('front-edit/', include('front.urls')),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"[email protected]"
] | |
3ab3e27fb739a45761ef77b83f03b45a6dab15f9 | b00efc53bec9b05f91703db81387325fae0a771e | /MAI/olymp/17.02.05/a.py | 364918dd1e8451ebddaa61670614cbf7012cf250 | [] | no_license | 21zaber/MAI | ac88eb1dd4b8f6b9d184527a3b1824a05993a9e1 | 45f25bdd5996329fd05f3e0ec7eb1289443f17b5 | refs/heads/master | 2021-01-17T07:12:22.303754 | 2018-02-08T15:05:30 | 2018-02-08T15:05:30 | 54,101,933 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 832 | py | n = int(input())
q = n+3
t = [[[[0 for i in range(q)] for i in range(q)] for i in range(q)] for i in range(n+1)]
# len vs max last
t[1][0][0][0] = 1
for l in range(1, n):
for vs in range(l+1):
for mx in range(l):
for lst in range(mx+1):
c = 0
if t[l][vs][mx][lst] == 0:
continue
for i in range(vs+2):
if i <= lst:
t[l+1][vs][mx][i] += t[l][vs][mx][lst]
c+=1
elif i >= mx:
t[l+1][vs+1][i][i] += t[l][vs][mx][lst]
c+=1
#print('l: {}, vs: {}, m: {}, lst: {}, c: {}'.format(l, vs, mx, lst, c))
ans = 0
for i in t[-1]:
for j in i:
for k in j:
ans += k
print(ans)
| [
"[email protected]"
] | |
1ecf217ac3f73bc4e4f65a2a705ed8b490973479 | 155b6c640dc427590737750fe39542a31eda2aa4 | /api-test/easyloan/testAction/loginAction.py | 1ffce9a91563013b011f796bed0bf0a925d88370 | [] | no_license | RomySaber/api-test | d4b3add00e7e5ed70a5c72bb38dc010f67bbd981 | 028c9f7fe0d321db2af7f1cb936c403194db850c | refs/heads/master | 2022-10-09T18:42:43.352325 | 2020-06-11T07:00:04 | 2020-06-11T07:00:04 | 271,468,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,582 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time :2019-04-23 2:33 PM
@Author : 罗林
@File : loginAction.py
@desc : login helpers for the easyloan web and app APIs
"""
import requests
import json
from common.myConfig import ConfigUtils as conf
from common.myCommon.Logger import getlog
from common.myConfig import MysqlConfig as ms
from common.mydb import MysqlClent
User = 'fqhd001'
Passwd = '5e81f67ed14a5443ec6a3682513f0b9b'
mobile = '13699479886'
app_passwd = 'll123456'
DB = MysqlClent.get_conn('192.168.15.159', 3306, 'easyloan', 'easyloan', '78dk.com')
web_URL = ms.get('easyloan_web_apiURL')
app_URL = ms.get('easyloan_app_apiURL')
LOGGER = getlog(__name__)
API_TEST_HEADERS = {"Content-Type": "application/json"}
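# A shared Session reuses one connection pool (and cookie jar) across the login calls below.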
rq = requests.Session()
sign = conf.get('report', 'sign')
def test_easyloan_web_login():
url = web_URL + '/api/78dk/web/login'
querystring = json.dumps({"username": User, "password": Passwd})
response = rq.post(url, headers=API_TEST_HEADERS, data=querystring)
LOGGER.info("token:【{}】".format(response.json()["data"]["token"]))
return response.json()["data"]["token"]
def test_easyloan_app_login():
url = app_URL + '/api/78dk/clientapp/login/pwLogin'
querystring = json.dumps({"mobile": mobile, "password": app_passwd})
response = rq.post(url, headers=API_TEST_HEADERS, data=querystring)
LOGGER.info("token:【{}】".format(response.json()["data"]["token"]))
LOGGER.info(response.text)
return response.json()["data"]["token"]
def test_yygl_login():
pass
if __name__ == '__main__':
test_easyloan_app_login()
| [
"[email protected]"
] | |
4852b83b2264cd75b2dfc36bc578fc47b1f9e399 | cf5efed6bc1e9bd27f94663d2443c6bdd1cb472a | /1-pack_web_static.py | 1688b66bfe9161d9b0827db23d9332f8638567fd | [] | no_license | yulyzulu/AirBnB_clone_v2 | 593db702ede02ac17b6883b3e99b6e1eb36a33ee | 1a40aec60996dc98ad9ff45f5e1224816ff6735b | refs/heads/master | 2021-05-25T15:33:22.100621 | 2020-04-23T23:23:25 | 2020-04-23T23:23:25 | 253,810,650 | 0 | 0 | null | 2020-04-07T14:02:36 | 2020-04-07T14:02:35 | null | UTF-8 | Python | false | false | 682 | py | #!/usr/bin/python3
"""Module that execute functions"""
from fabric.api import local
from fabric.decorators import runs_once
from datetime import datetime
from os.path import getsize
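# runs_once makes Fabric build the archive a single time even when the
# task is executed against several hosts.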
@runs_once
def do_pack():
local("mkdir -p versions")
date_time = datetime.now().strftime("%Y%m%d%H%M%S")
command = local("tar -cvzf versions/web_static_{}.tgz ./web_stat\
ic".format(date_time))
if command.succeeded:
size = getsize('versions/web_static_{}.tgz'.format(date_time))
print("web_static packed: versions/web_static_{}.tgz -> {}Byt\
es".format(date_time, size))
return ('versions/web_static_{}.tgz'.format(date_time))
else:
return None
| [
"[email protected]"
] | |
6f92dc3b1e46aec56a6ea497917e884f922966d1 | a23ec1e8470f87d1b3fa34b01506d6bdd63f6569 | /algorithms/967. Numbers With Same Consecutive Differences.py | 3750ace278b12334a762bdf37e95b48783d3f618 | [] | no_license | xiaohai0520/Algorithm | ae41d2137e085a30b2ac1034b8ea00e6c9de3ef1 | 96945ffadd893c1be60c3bde70e1f1cd51edd834 | refs/heads/master | 2023-04-14T17:41:21.918167 | 2021-04-20T13:57:09 | 2021-04-20T13:57:09 | 156,438,761 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 819 | py | Dfs problem.
each time we add one digit, we make sure it satisfies the consecutive-difference condition.
Code:
class Solution:
def numsSameConsecDiff(self, N, K):
"""
:type N: int
:type K: int
:rtype: List[int]
"""
if N == 1:
return [i for i in range(10)]
if K == 0:
return list(map(int,[str(i)*N for i in range(1,10)]))
res = []
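        # path is the number built so far; l counts its digits. Branch to
        # cur+K and cur-K so adjacent digits always differ by exactly K.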
def dfs(path,l):
if l == N:
res.append(path)
return
cur = path % 10
if cur + K < 10:
dfs(path * 10 + cur + K, l + 1)
if cur - K >= 0:
dfs(path * 10 + cur - K, l + 1)
for i in range(1,10):
dfs(i,1)
return res
| [
"[email protected]"
] | |
fd340134c630935c8dff1c7e83d8d2b1a4bd61dc | fcdfe976c9ed60b18def889692a17dc18a8dd6d7 | /python/qt/close_dialog.py | 732b8043635aa9a35802bd6867ad50d908c18473 | [] | no_license | akihikoy/ay_test | 4907470889c9bda11cdc84e8231ef3156fda8bd7 | a24dfb720960bfedb94be3b4d147e37616e7f39a | refs/heads/master | 2023-09-02T19:24:47.832392 | 2023-08-27T06:45:20 | 2023-08-27T06:45:20 | 181,903,332 | 6 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,098 | py | #!/usr/bin/python
#\file close_dialog.py
#\file    close_dialog.py
#\author Akihiko Yamaguchi, [email protected]
#\version 0.1
#\date Apr.01, 2017
# http://stackoverflow.com/questions/14834494/pyqt-clicking-x-doesnt-trigger-closeevent
import sys
from PyQt4 import QtGui, QtCore, uic
class MainWindow(QtGui.QWidget):
def __init__(self):
QtGui.QWidget.__init__(self)
# Set window size.
self.resize(320, 240)
# Set window title
self.setWindowTitle("Hello World!")
# Add a button
btn= QtGui.QPushButton('Hello World!', self)
btn.setToolTip('Click to quit!')
btn.clicked.connect(self.close)
btn.resize(btn.sizeHint())
btn.move(100, 80)
def closeEvent(self, event):
print("event")
reply = QtGui.QMessageBox.question(self, 'Message',
"Are you sure to quit?", QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
event.accept()
else:
event.ignore()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
win = MainWindow()
win.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
2e4c319c80a704585fbab79e4c5ae8329e38f201 | ddc7e22952de6298d14b9297e765db29f327cfcb | /BFS/medium/minKnightMoves.py | ec82adee4ecf9c80d54548712c8789fa3cbcdfdb | [
"MIT"
] | permissive | linminhtoo/algorithms | 154a557b4acada2618aac09a8868db9f3722204f | 884422a7c9f531e7ccaae03ba1ccbd6966b23dd3 | refs/heads/master | 2023-03-21T23:01:58.386497 | 2021-03-16T07:13:32 | 2021-03-16T07:13:32 | 296,247,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,461 | py | # leetcode is premium problem
# see https://www.geeksforgeeks.org/minimum-steps-reach-target-knight/
# https://www.hackerrank.com/challenges/knightl-on-chessboard/problem <-- slightly harder version of the same problem (SCROLL DOWN)
# BFS
class Cell: # don't have to use this, can just use a tuple also (x, y, dist)
def __init__(self, x: int, y: int, dist: int):
self.x = x
self.y = y
self.dist = dist
from typing import Tuple
from collections import deque
class Solution:
def inBoard(self, x: int, y: int) -> bool:
return (0 <= x < 8) and (0 <= y < 8)
def minKnightMoves(self, knight_pos: Tuple[int, int],
target_pos: Tuple[int, int]) -> int:
dirs = [
(1, 2),
(2, 1),
(-1, -2),
(-2, -1),
(-1, 2),
(2, -1),
(1, -2),
(-2, 1)
]
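        # BFS expands positions in order of increasing move count, so the
        # first time the target is dequeued its distance is already minimal.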
queue = deque()
queue.append(Cell(knight_pos[0], knight_pos[1], 0))
visited = [[False] * 8 for _ in range(8)]
visited[knight_pos[0]][knight_pos[1]] = True
while queue:
now = queue.popleft()
if (now.x, now.y) == target_pos:
return now.dist
for i in range(8):
next_x = now.x + dirs[i][0]
next_y = now.y + dirs[i][1]
if self.inBoard(next_x, next_y):
if not visited[next_x][next_y]:
visited[next_x][next_y] = True
queue.append(Cell(next_x, next_y, now.dist + 1))
# https://www.hackerrank.com/challenges/knightl-on-chessboard/problem
class Solution_hackerrank_mine_passall:
def knightlOnAChessboard(self, n: int):
out = [[0]*(n-1) for _ in range(n-1)]
for i in range(1, n):
for j in range(1, n):
if out[j-1][i-1] != 0: # output array is symmetric
out[i-1][j-1] = out[j-1][i-1]
else:
                    out[i-1][j-1] = self.makeMove(n, i, j)
return out
@staticmethod
def inBoard(n: int, x: int, y: int) -> bool:
return (0 <= x < n) and (0 <= y < n)
@staticmethod
def makeMove(n: int, a: int, b: int) -> int:
dirs = [
(a, b),
(b, a),
(-a, b),
(b, -a),
(a, -b),
(-b, a),
(-a, -b),
(-b, -a)
]
queue = deque()
queue.append(Cell(0, 0, 0))
visited = [[False] * n for _ in range(n)]
visited[0][0] = True
while queue:
now = queue.popleft()
if (now.x, now.y) == (n-1, n-1):
return now.dist
for i in range(8):
next_x = now.x + dirs[i][0]
next_y = now.y + dirs[i][1]
                if Solution_hackerrank_mine_passall.inBoard(n, next_x, next_y):
# exploit symmetry of chess board (start from topleft, end at bottomright)
# ONLY works in this special problem! (not for the generic leetcode problem above)
# offers small speedup
if visited[next_y][next_x]:
visited[next_x][next_y] = True
if not visited[next_x][next_y]:
visited[next_x][next_y] = True
queue.append(Cell(next_x, next_y, now.dist + 1))
return -1 | [
"[email protected]"
] | |
a9999691c3e277bd3c41bb28c97ea2216afad0fb | 508cd804441ce076b318df056153870d2fe52e1b | /sphere.py | e43689710948ecd61748515c08b01fe57e116aba | [] | no_license | archibate/taichi_works | ffe80e6df27b7bcb3ce1c4b24e23ceeb0ac4ff8a | 9aaae1de9fe53740030c6e24a0a57fc39d71dd71 | refs/heads/master | 2022-11-18T19:07:37.122093 | 2020-07-17T08:45:36 | 2020-07-17T08:45:36 | 276,714,718 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,284 | py | import taichi as ti
import taichi_glsl as tl
import random, math
ti.init()  # kernel_profiler=True
dt = 0.01
kMaxParticles = 1024
kResolution = 512
kKernelSize = 16 / 512
kKernelFactor = 0.5 / kKernelSize**2
kGravity = tl.vec(0.0, -0.0)
kUseImage = False
kBackgroundColor = 0x112f41
kParticleColor = 0x068587
kBoundaryColor = 0xebaca2
kParticleDisplaySize = 0.2 * kKernelSize * kResolution
particle_pos = ti.Vector(2, ti.f32, kMaxParticles)
particle_vel = ti.Vector(2, ti.f32, kMaxParticles)
property_vel = ti.Vector(2, ti.f32, kMaxParticles)
property_density = ti.var(ti.f32, kMaxParticles)
property_force = ti.Vector(2, ti.f32, kMaxParticles)
particle_pressure = ti.var(ti.f32, kMaxParticles)
n_particles = ti.var(ti.i32, ())
if kUseImage:
image = ti.Vector(3, ti.f32, (kResolution, kResolution))
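# Truncated Gaussian smoothing kernel: particles contribute only within kKernelSize.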
@ti.func
def smooth(distance):
ret = 0.0
r2 = distance.norm_sqr()
if r2 < kKernelSize**2:
ret = ti.exp(-r2 * kKernelFactor)
return ret
@ti.func
def grad_smooth(distance):
ret = tl.vec2(0.0)
r2 = distance.norm_sqr()
if r2 < kKernelSize**2:
ret = (-2 * kKernelFactor) * distance * ti.exp(-r2 * kKernelFactor)
return ret
@ti.func
def alloc_particle():
ret = ti.atomic_add(n_particles[None], 1)
assert ret < kMaxParticles
return ret
@ti.kernel
def add_particle_at(mx: ti.f32, my: ti.f32, vx: ti.f32, vy: ti.f32):
id = alloc_particle()
particle_pos[id] = tl.vec(mx, my)
particle_vel[id] = tl.vec(vx, vy)
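# Tait-style equation of state used in weakly compressible SPH:
# p = b * ((rho/rho_0)**gamma - 1), stiff for gamma = 7.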
@ti.func
def preupdate(rho, rho_0=1000, gamma=7.0, c_0=20.0):
b = rho_0 * c_0**2 / gamma
return b * ((rho / rho_0) ** gamma - 1.0)
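# All-pairs O(n^2) gather: kernel-weighted average velocity and number density,
# then a pairwise force along the kernel gradient for each particle.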
@ti.func
def update_property():
for i in range(n_particles[None]):
my_pos = particle_pos[i]
property_vel[i] = particle_vel[i]
property_density[i] = 1.0
for j in range(n_particles[None]):
w = smooth(my_pos - particle_pos[j])
property_vel[i] += w * particle_vel[j]
property_density[i] += w
property_vel[i] /= property_density[i]
for i in range(n_particles[None]):
my_pos = particle_pos[i]
property_force[i] = tl.vec2(0.0)
for j in range(n_particles[None]):
dw = grad_smooth(my_pos - particle_pos[j])
ds = particle_pos[j] - particle_pos[i]
dv = particle_vel[j] - particle_vel[i]
force = dw * property_density[j] * dv.dot(ds)
property_force[i] += force
@ti.kernel
def substep():
update_property()
for i in range(n_particles[None]):
gravity = (0.5 - particle_pos[i]) * 2.0
particle_vel[i] += gravity * dt
particle_vel[i] += property_force[i] * dt
particle_vel[i] = tl.boundReflect(particle_pos[i], particle_vel[i],
kKernelSize, 1 - kKernelSize, 0)
particle_pos[i] += particle_vel[i] * dt
        particle_pressure[i] = preupdate(property_density[i])  # Tait EOS pressure; not yet fed back into the forces
@ti.kernel
def update_image():
for i in ti.grouped(image):
image[i] = tl.vec3(0)
for i in range(n_particles[None]):
pos = particle_pos[i]
A = ti.floor(max(0, pos - kKernelSize)) * kResolution
B = ti.ceil(min(1, pos + kKernelSize + 1)) * kResolution
for pix in ti.grouped(ti.ndrange((A.x, B.x), (A.y, B.y))):
pix_pos = pix / kResolution
w = smooth(pix_pos - particle_pos[i])
image[pix].x += w
last_mouse = tl.vec2(0.0)
gui = ti.GUI('WCSPH', kResolution, background_color=kBackgroundColor)
while gui.running:
for e in gui.get_events():
if e.key == gui.ESCAPE:
gui.running = False
elif e.key == gui.LMB:
if e.type == gui.PRESS:
last_mouse = tl.vec(*gui.get_cursor_pos())
else:
mouse = tl.vec(*gui.get_cursor_pos())
diff = (mouse - last_mouse) * 2.0
add_particle_at(mouse.x, mouse.y, diff.x, diff.y)
elif e.key == 'r':
a = random.random() * math.tau
add_particle_at(math.cos(a) * 0.4 + 0.5, math.sin(a) * 0.4 + 0.5, 0, 0)
substep()
if kUseImage:
update_image()
gui.set_image(image)
else:
gui.circles(particle_pos.to_numpy()[:n_particles[None]],
radius=kParticleDisplaySize, color=kParticleColor)
gui.show()
| [
"[email protected]"
] | |
50be32c063b21f51fb59e29080e17d63f03faeea | 77c2010bb9533ecbdfa46cd41c16ee5ae26e94fa | /library/migrations/0001_initial.py | d100e69ebfc03b3f1d153433b33548151de3b8ec | [] | no_license | dimansion/portfolio-django | b2cbb28dff97dd03cdf795f0bc661d39bcfae83d | 2dffe0e8579b2a426cb7aceb1ee085933b122d90 | refs/heads/master | 2020-05-23T08:15:38.205372 | 2017-03-05T14:44:14 | 2017-03-05T14:44:14 | 70,251,368 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,605 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-10-09 06:20
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Book',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(blank=True, null=True, upload_to='')),
('author', models.CharField(max_length=128, unique=True)),
('title', models.CharField(max_length=200)),
('synopsis', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('published_date', models.DateTimeField(blank=True, null=True)),
('slug', models.SlugField()),
],
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128, unique=True)),
('slug', models.SlugField()),
],
),
migrations.AddField(
model_name='book',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='library.Category'),
),
]
| [
"[email protected]"
] | |
ee3747640d2d81beb67e38eb7bf9195041503fd6 | 51bdac517ec342a7a38a67e2b3c521f8bd53c5f2 | /numba/tests/pointers/test_null.py | fa46e26b67aa41253b5f4b2b6e874e710d7a3aaf | [
"BSD-2-Clause"
] | permissive | cu6yu4/numba | 66bc7ee751fdfaabab92b6f571dbff00cb4d7652 | f64aced5a7c94a434fd2d8c678d93ff8ac3ae1fb | refs/heads/master | 2020-12-25T13:45:44.629782 | 2013-01-25T20:28:12 | 2013-01-25T20:28:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | import ctypes
import numba
from numba import *
#intp = ctypes.POINTER(ctypes.c_int)
#voidp = ctypes.c_void_p
intp = int_.pointer()
voidp = void.pointer()
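# Casting an integer 0 through Py_uintptr_t yields typed null pointers to compare with NULL.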
@autojit
def test_compare_null():
"""
>>> test_compare_null()
True
"""
return intp(Py_uintptr_t(0)) == NULL
@autojit
def test_compare_null_attribute():
"""
>>> test_compare_null_attribute()
True
"""
return voidp(Py_uintptr_t(0)) == numba.NULL
if __name__ == '__main__':
# test_compare_null()
# test_compare_null_attribute()
numba.testmod() | [
"[email protected]"
] | |
72c869f6621fd60a33ce24785b818e1ae15e5e87 | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/ThirteenTeV/gluinoGMSB/gluinoGMSB_M2450_ctau10000p0_TuneCUETP8M1_13TeV_pythia8_cff.py | f7d05477a5be68d6fcfb3e3c2425fddbdc7d028d | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 50,207 | py | FLAVOR='gluino'
COM_ENERGY = 13000.
CROSS_SECTION = 0.000104886
CTAU = 10000.0
MASS_POINT = 2450
SLHA_TABLE="""
## Important note!
## This file has been modified by hand to give the gluino and the
## stop_1 a very narrow width, such that it can be used to try out
## the R-hadron machinery. It is not a realistic SUSY scenario.
##
##******************************************************************
## MadGraph/MadEvent *
##******************************************************************
## *
## param_card corresponding the SPS point 1a (by SoftSusy 2.0.5) *
## *
##******************************************************************
## Les Houches friendly file for the (MS)SM parameters of MadGraph *
## SM parameter set and decay widths produced by MSSMCalc *
##******************************************************************
##*Please note the following IMPORTANT issues: *
## *
##0. REFRAIN from editing this file by hand! Some of the parame- *
## ters are not independent. Always use a calculator. *
## *
##1. alpha_S(MZ) has been used in the calculation of the parameters*
## This value is KEPT by madgraph when no pdf are used lpp(i)=0, *
## but, for consistency, it will be reset by madgraph to the *
## value expected IF the pdfs for collisions with hadrons are *
## used. *
## *
##2. Values of the charm and bottom kinematic (pole) masses are *
## those used in the matrix elements and phase space UNLESS they *
## are set to ZERO from the start in the model (particles.dat) *
## This happens, for example, when using 5-flavor QCD where *
## charm and bottom are treated as partons in the initial state *
## and a zero mass might be hardwired in the model definition. *
## *
## The SUSY decays have calculated using SDECAY 1.1a *
## *
##******************************************************************
#
BLOCK DCINFO # Decay Program information
1 SDECAY # decay calculator
2 1.1a # version number
#
BLOCK SPINFO # Spectrum calculator information
1 SOFTSUSY # spectrum calculator
2 2.0.5 # version number
#
BLOCK MODSEL # Model selection
1 1 sugra
#
BLOCK SMINPUTS # Standard Model inputs
1 1.27934000E+02 # alpha_em^-1(M_Z)^MSbar
2 1.16637000E-05 # G_F [GeV^-2]
3 1.18000000E-01 # alpha_S(M_Z)^MSbar
4 9.11876000E+01 # M_Z pole mass
5 4.25000000E+00 # mb(mb)^MSbar
6 1.75000000E+02 # mt pole mass
7 1.77700000E+00 # mtau pole mass
#
BLOCK MINPAR # Input parameters - minimal models
1 1.00000000E+02 # m0
2 2.50000000E+02 # m12
3 1.00000000E+01 # tanb
4 1.00000000E+00 # sign(mu)
5 -1.00000000E+02 # A0
#
BLOCK MASS # Mass Spectrum
# PDG code mass particle
5 4.88991651E+00 # b-quark pole mass calculated from mb(mb)_Msbar
6 1.75000000E+02 # mt pole mass (not read by ME)
24 7.98290131E+01 # W+
25 1.25899057E+02 # h
35 3.99960116E+05 # H
36 3.99583917E+05 # A
37 4.07879012E+05 # H+
1000001 5.68441109E+05 # ~d_L
2000001 5.45228462E+05 # ~d_R
1000002 5.61119014E+05 # ~u_L
2000002 5.49259265E+05 # ~u_R
1000003 5.68441109E+05 # ~s_L
2000003 5.45228462E+05 # ~s_R
1000004 5.61119014E+05 # ~c_L
2000004 5.49259265E+05 # ~c_R
1000005 10000000 # ~b_1
2000005 5.12315123E+05 # ~b_2
1000006 1000000.00 # ~t_1
2000006 5.85785818E+05 # ~t_2
1000011 2.02915690E+05 # ~e_L
2000011 1.44102799E+05 # ~e_R
1000012 1.85258326E+05 # ~nu_eL
1000013 2.02915690E+05 # ~mu_L
2000013 1.44102799E+05 # ~mu_R
1000014 1.85258326E+05 # ~nu_muL
1000015 1.34490864E+05 # ~tau_1
2000015 2.06867805E+05 # ~tau_2
1000016 1.84708464E+05 # ~nu_tauL
1000021 2450 # ~g
1000022 10.0 # ~chi_10
1000023 1.81088157E+05 # ~chi_20
1000025 -3.63756027E+05 # ~chi_30
1000035 3.81729382E+05 # ~chi_40
1000024 1.81696474E+05 # ~chi_1+
1000037 3.79939320E+05 # ~chi_2+
#
BLOCK NMIX # Neutralino Mixing Matrix
1 1 9.86364430E-01 # N_11
1 2 -5.31103553E-02 # N_12
1 3 1.46433995E-01 # N_13
1 4 -5.31186117E-02 # N_14
2 1 9.93505358E-02 # N_21
2 2 9.44949299E-01 # N_22
2 3 -2.69846720E-01 # N_23
2 4 1.56150698E-01 # N_24
3 1 -6.03388002E-02 # N_31
3 2 8.77004854E-02 # N_32
3 3 6.95877493E-01 # N_33
3 4 7.10226984E-01 # N_34
4 1 -1.16507132E-01 # N_41
4 2 3.10739017E-01 # N_42
4 3 6.49225960E-01 # N_43
4 4 -6.84377823E-01 # N_44
#
BLOCK UMIX # Chargino Mixing Matrix U
1 1 9.16834859E-01 # U_11
1 2 -3.99266629E-01 # U_12
2 1 3.99266629E-01 # U_21
2 2 9.16834859E-01 # U_22
#
BLOCK VMIX # Chargino Mixing Matrix V
1 1 9.72557835E-01 # V_11
1 2 -2.32661249E-01 # V_12
2 1 2.32661249E-01 # V_21
2 2 9.72557835E-01 # V_22
#
BLOCK STOPMIX # Stop Mixing Matrix
1 1 5.53644960E-01 # O_{11}
1 2 8.32752820E-01 # O_{12}
2 1 8.32752820E-01 # O_{21}
2 2 -5.53644960E-01 # O_{22}
#
BLOCK SBOTMIX # Sbottom Mixing Matrix
1 1 9.38737896E-01 # O_{11}
1 2 3.44631925E-01 # O_{12}
2 1 -3.44631925E-01 # O_{21}
2 2 9.38737896E-01 # O_{22}
#
BLOCK STAUMIX # Stau Mixing Matrix
1 1 2.82487190E-01 # O_{11}
1 2 9.59271071E-01 # O_{12}
2 1 9.59271071E-01 # O_{21}
2 2 -2.82487190E-01 # O_{22}
#
BLOCK ALPHA # Higgs mixing
-1.13825210E-01 # Mixing angle in the neutral Higgs boson sector
#
BLOCK HMIX Q= 4.67034192E+02 # DRbar Higgs Parameters
1 3.57680977E+02 # mu(Q)MSSM DRbar
2 9.74862403E+00 # tan beta(Q)MSSM DRba
3 2.44894549E+02 # higgs vev(Q)MSSM DRb
4 1.66439065E+05 # mA^2(Q)MSSM DRbar
#
BLOCK GAUGE Q= 4.67034192E+02 # The gauge couplings
3 1.10178679E+00 # g3(Q) MSbar
#
BLOCK AU Q= 4.67034192E+02 # The trilinear couplings
1 1 0.00000000E+00 # A_u(Q) DRbar
2 2 0.00000000E+00 # A_c(Q) DRbar
3 3 -4.98129778E+02 # A_t(Q) DRbar
#
BLOCK AD Q= 4.67034192E+02 # The trilinear couplings
1 1 0.00000000E+00 # A_d(Q) DRbar
2 2 0.00000000E+00 # A_s(Q) DRbar
3 3 -7.97274397E+02 # A_b(Q) DRbar
#
BLOCK AE Q= 4.67034192E+02 # The trilinear couplings
1 1 0.00000000E+00 # A_e(Q) DRbar
2 2 0.00000000E+00 # A_mu(Q) DRbar
3 3 -2.51776873E+02 # A_tau(Q) DRbar
#
BLOCK YU Q= 4.67034192E+02 # The Yukawa couplings
3 3 8.92844550E-01 # y_t(Q) DRbar
#
BLOCK YD Q= 4.67034192E+02 # The Yukawa couplings
3 3 1.38840206E-01 # y_b(Q) DRbar
#
BLOCK YE Q= 4.67034192E+02 # The Yukawa couplings
3 3 1.00890810E-01 # y_tau(Q) DRbar
#
BLOCK MSOFT Q= 4.67034192E+02 # The soft SUSY breaking masses at the scale Q
1 1.01396534E+02 # M_1(Q)
2 1.91504241E+02 # M_2(Q)
3 5.88263031E+02 # M_3(Q)
21 3.23374943E+04 # mH1^2(Q)
22 -1.28800134E+05 # mH2^2(Q)
31 1.95334764E+02 # meL(Q)
32 1.95334764E+02 # mmuL(Q)
33 1.94495956E+02 # mtauL(Q)
34 1.36494061E+02 # meR(Q)
35 1.36494061E+02 # mmuR(Q)
36 1.34043428E+02 # mtauR(Q)
41 5.47573466E+02 # mqL1(Q)
42 5.47573466E+02 # mqL2(Q)
43 4.98763839E+02 # mqL3(Q)
44 5.29511195E+02 # muR(Q)
45 5.29511195E+02 # mcR(Q)
46 4.23245877E+02 # mtR(Q)
47 5.23148807E+02 # mdR(Q)
48 5.23148807E+02 # msR(Q)
49 5.19867261E+02 # mbR(Q)
#
#
#
# =================
# |The decay table|
# =================
#
# - The multi-body decays for the inos, stops and sbottoms are included.
#
# PDG Width
DECAY 25 1.98610799E-03 # h decays
# BR NDA ID1 ID2
1.45642955E-01 2 15 -15 # BR(H1 -> tau- tau+)
8.19070713E-01 2 5 -5 # BR(H1 -> b bb)
3.36338173E-02 2 24 -24 # BR(H1 -> W+ W-)
1.65251528E-03 2 23 23 # BR(H1 -> Z Z)
#
# PDG Width
DECAY 35 5.74801389E-01 # H decays
# BR NDA ID1 ID2
1.39072676E-01 2 15 -15 # BR(H -> tau- tau+)
4.84110879E-02 2 6 -6 # BR(H -> t tb)
7.89500067E-01 2 5 -5 # BR(H -> b bb)
3.87681171E-03 2 24 -24 # BR(H -> W+ W-)
1.80454752E-03 2 23 23 # BR(H -> Z Z)
0.00000000E+00 2 24 -37 # BR(H -> W+ H-)
0.00000000E+00 2 -24 37 # BR(H -> W- H+)
0.00000000E+00 2 37 -37 # BR(H -> H+ H-)
1.73348101E-02 2 25 25 # BR(H -> h h)
0.00000000E+00 2 36 36 # BR(H -> A A)
#
# PDG Width
DECAY 36 6.32178488E-01 # A decays
# BR NDA ID1 ID2
1.26659725E-01 2 15 -15 # BR(A -> tau- tau+)
1.51081526E-01 2 6 -6 # BR(A -> t tb)
7.19406137E-01 2 5 -5 # BR(A -> b bb)
2.85261228E-03 2 23 25 # BR(A -> Z h)
0.00000000E+00 2 23 35 # BR(A -> Z H)
0.00000000E+00 2 24 -37 # BR(A -> W+ H-)
0.00000000E+00 2 -24 37 # BR(A -> W- H+)
#
# PDG Width
DECAY 37 5.46962813E-01 # H+ decays
# BR NDA ID1 ID2
1.49435135E-01 2 -15 16 # BR(H+ -> tau+ nu_tau)
8.46811711E-01 2 6 -5 # BR(H+ -> t bb)
3.75315387E-03 2 24 25 # BR(H+ -> W+ h)
0.00000000E+00 2 24 35 # BR(H+ -> W+ H)
0.00000000E+00 2 24 36 # BR(H+ -> W+ A)
#
# PDG Width
DECAY 1000021 1.9732e-17 #1.9732E-14 # gluino decays
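#     width chosen so that ctau = hbar*c / width = 1.9732e-13 GeV*mm / 1.9732e-17 GeV = 10000 mm, matching CTAU above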
# BR NDA ID1 ID2
1.0 2 1000039 21
# 2.08454202E-02 2 1000001 -1 # BR(~g -> ~d_L db)
# 2.08454202E-02 2 -1000001 1 # BR(~g -> ~d_L* d )
# 5.07075274E-02 2 2000001 -1 # BR(~g -> ~d_R db)
# 5.07075274E-02 2 -2000001 1 # BR(~g -> ~d_R* d )
# 2.89787767E-02 2 1000002 -2 # BR(~g -> ~u_L ub)
# 2.89787767E-02 2 -1000002 2 # BR(~g -> ~u_L* u )
# 4.46872773E-02 2 2000002 -2 # BR(~g -> ~u_R ub)
# 4.46872773E-02 2 -2000002 2 # BR(~g -> ~u_R* u )
# 2.08454202E-02 2 1000003 -3 # BR(~g -> ~s_L sb)
# 2.08454202E-02 2 -1000003 3 # BR(~g -> ~s_L* s )
# 5.07075274E-02 2 2000003 -3 # BR(~g -> ~s_R sb)
# 5.07075274E-02 2 -2000003 3 # BR(~g -> ~s_R* s )
# 2.89787767E-02 2 1000004 -4 # BR(~g -> ~c_L cb)
# 2.89787767E-02 2 -1000004 4 # BR(~g -> ~c_L* c )
# 4.46872773E-02 2 2000004 -4 # BR(~g -> ~c_R cb)
# 4.46872773E-02 2 -2000004 4 # BR(~g -> ~c_R* c )
# 1.05840237E-01 2 1000005 -5 # BR(~g -> ~b_1 bb)
# 1.05840237E-01 2 -1000005 5 # BR(~g -> ~b_1* b )
# 5.56574805E-02 2 2000005 -5 # BR(~g -> ~b_2 bb)
# 5.56574805E-02 2 -2000005 5 # BR(~g -> ~b_2* b )
# 4.80642793E-02 2 1000006 -6 # BR(~g -> ~t_1 tb)
# 4.80642793E-02 2 -1000006 6 # BR(~g -> ~t_1* t )
# 0.00000000E+00 2 2000006 -6 # BR(~g -> ~t_2 tb)
# 0.00000000E+00 2 -2000006 6 # BR(~g -> ~t_2* t )
#
# PDG Width
DECAY 1000006 1.97326971684839e-14 # stop1 decays
# BR NDA ID1 ID2
5.0E-01 2 -5 -1
5.0E-01 2 -5 -3
# 3.33333333E-01 2 5 -11
# 3.33333333E-01 2 5 -13
# 3.33333333E-01 2 5 -15
# 1.92947616E-01 2 1000022 6 # BR(~t_1 -> ~chi_10 t )
# 1.17469211E-01 2 1000023 6 # BR(~t_1 -> ~chi_20 t )
# 0.00000000E+00 2 1000025 6 # BR(~t_1 -> ~chi_30 t )
# 0.00000000E+00 2 1000035 6 # BR(~t_1 -> ~chi_40 t )
# 6.75747693E-01 2 1000024 5 # BR(~t_1 -> ~chi_1+ b )
# 1.38354802E-02 2 1000037 5 # BR(~t_1 -> ~chi_2+ b )
# 0.00000000E+00 2 1000021 6 # BR(~t_1 -> ~g t )
# 0.00000000E+00 2 1000005 37 # BR(~t_1 -> ~b_1 H+)
# 0.00000000E+00 2 2000005 37 # BR(~t_1 -> ~b_2 H+)
# 0.00000000E+00 2 1000005 24 # BR(~t_1 -> ~b_1 W+)
# 0.00000000E+00 2 2000005 24 # BR(~t_1 -> ~b_2 W+)
#
# PDG Width
DECAY 2000006 7.37313275E+00 # stop2 decays
# BR NDA ID1 ID2
2.96825635E-02 2 1000022 6 # BR(~t_2 -> ~chi_10 t )
8.68035358E-02 2 1000023 6 # BR(~t_2 -> ~chi_20 t )
4.18408351E-02 2 1000025 6 # BR(~t_2 -> ~chi_30 t )
1.93281647E-01 2 1000035 6 # BR(~t_2 -> ~chi_40 t )
2.19632356E-01 2 1000024 5 # BR(~t_2 -> ~chi_1+ b )
2.02206148E-01 2 1000037 5 # BR(~t_2 -> ~chi_2+ b )
0.00000000E+00 2 1000021 6 # BR(~t_2 -> ~g t )
3.66397706E-02 2 1000006 25 # BR(~t_2 -> ~t_1 h )
0.00000000E+00 2 1000006 35 # BR(~t_2 -> ~t_1 H )
0.00000000E+00 2 1000006 36 # BR(~t_2 -> ~t_1 A )
0.00000000E+00 2 1000005 37 # BR(~t_2 -> ~b_1 H+)
0.00000000E+00 2 2000005 37 # BR(~t_2 -> ~b_2 H+)
1.89913144E-01 2 1000006 23 # BR(~t_2 -> ~t_1 Z )
0.00000000E+00 2 1000005 24 # BR(~t_2 -> ~b_1 W+)
0.00000000E+00 2 2000005 24 # BR(~t_2 -> ~b_2 W+)
#
# PDG Width
DECAY 1000005 1.97326971684839e-14 # sbottom1 decays
# BR NDA ID1 ID2
0.33333333 2 1 12
0.33333333 2 1 14
0.33333333 2 1 16
# 4.43307074E-02 2 1000022 5 # BR(~b_1 -> ~chi_10 b )
# 3.56319904E-01 2 1000023 5 # BR(~b_1 -> ~chi_20 b )
# 5.16083795E-03 2 1000025 5 # BR(~b_1 -> ~chi_30 b )
# 1.04105080E-02 2 1000035 5 # BR(~b_1 -> ~chi_40 b )
# 4.45830064E-01 2 -1000024 6 # BR(~b_1 -> ~chi_1- t )
# 0.00000000E+00 2 -1000037 6 # BR(~b_1 -> ~chi_2- t )
# 0.00000000E+00 2 1000021 5 # BR(~b_1 -> ~g b )
# 0.00000000E+00 2 1000006 -37 # BR(~b_1 -> ~t_1 H-)
# 0.00000000E+00 2 2000006 -37 # BR(~b_1 -> ~t_2 H-)
# 1.37947979E-01 2 1000006 -24 # BR(~b_1 -> ~t_1 W-)
# 0.00000000E+00 2 2000006 -24 # BR(~b_1 -> ~t_2 W-)
#
# PDG Width
DECAY 2000005 8.01566294E-01 # sbottom2 decays
# BR NDA ID1 ID2
2.86200590E-01 2 1000022 5 # BR(~b_2 -> ~chi_10 b )
1.40315912E-01 2 1000023 5 # BR(~b_2 -> ~chi_20 b )
5.32635592E-02 2 1000025 5 # BR(~b_2 -> ~chi_30 b )
7.48748121E-02 2 1000035 5 # BR(~b_2 -> ~chi_40 b )
1.79734294E-01 2 -1000024 6 # BR(~b_2 -> ~chi_1- t )
0.00000000E+00 2 -1000037 6 # BR(~b_2 -> ~chi_2- t )
0.00000000E+00 2 1000021 5 # BR(~b_2 -> ~g b )
0.00000000E+00 2 1000005 25 # BR(~b_2 -> ~b_1 h )
0.00000000E+00 2 1000005 35 # BR(~b_2 -> ~b_1 H )
0.00000000E+00 2 1000005 36 # BR(~b_2 -> ~b_1 A )
0.00000000E+00 2 1000006 -37 # BR(~b_2 -> ~t_1 H-)
0.00000000E+00 2 2000006 -37 # BR(~b_2 -> ~t_2 H-)
0.00000000E+00 2 1000005 23 # BR(~b_2 -> ~b_1 Z )
2.65610832E-01 2 1000006 -24 # BR(~b_2 -> ~t_1 W-)
0.00000000E+00 2 2000006 -24 # BR(~b_2 -> ~t_2 W-)
#
# PDG Width
DECAY 1000002 5.47719539E+00 # sup_L decays
# BR NDA ID1 ID2
6.65240987E-03 2 1000022 2 # BR(~u_L -> ~chi_10 u)
3.19051458E-01 2 1000023 2 # BR(~u_L -> ~chi_20 u)
8.44929059E-04 2 1000025 2 # BR(~u_L -> ~chi_30 u)
1.03485173E-02 2 1000035 2 # BR(~u_L -> ~chi_40 u)
6.49499518E-01 2 1000024 1 # BR(~u_L -> ~chi_1+ d)
1.36031676E-02 2 1000037 1 # BR(~u_L -> ~chi_2+ d)
0.00000000E+00 2 1000021 2 # BR(~u_L -> ~g u)
#
# PDG Width
DECAY 2000002 1.15297292E+00 # sup_R decays
# BR NDA ID1 ID2
9.86377420E-01 2 1000022 2 # BR(~u_R -> ~chi_10 u)
8.46640647E-03 2 1000023 2 # BR(~u_R -> ~chi_20 u)
1.23894695E-03 2 1000025 2 # BR(~u_R -> ~chi_30 u)
3.91722611E-03 2 1000035 2 # BR(~u_R -> ~chi_40 u)
0.00000000E+00 2 1000024 1 # BR(~u_R -> ~chi_1+ d)
0.00000000E+00 2 1000037 1 # BR(~u_R -> ~chi_2+ d)
0.00000000E+00 2 1000021 2 # BR(~u_R -> ~g u)
#
# PDG Width
DECAY 1000001 5.31278772E+00 # sdown_L decays
# BR NDA ID1 ID2
2.32317969E-02 2 1000022 1 # BR(~d_L -> ~chi_10 d)
3.10235077E-01 2 1000023 1 # BR(~d_L -> ~chi_20 d)
1.52334771E-03 2 1000025 1 # BR(~d_L -> ~chi_30 d)
1.48849798E-02 2 1000035 1 # BR(~d_L -> ~chi_40 d)
6.06452481E-01 2 -1000024 2 # BR(~d_L -> ~chi_1- u)
4.36723179E-02 2 -1000037 2 # BR(~d_L -> ~chi_2- u)
0.00000000E+00 2 1000021 1 # BR(~d_L -> ~g d)
#
# PDG Width
DECAY 2000001 2.85812308E-01 # sdown_R decays
# BR NDA ID1 ID2
9.86529614E-01 2 1000022 1 # BR(~d_R -> ~chi_10 d)
8.44510350E-03 2 1000023 1 # BR(~d_R -> ~chi_20 d)
1.21172119E-03 2 1000025 1 # BR(~d_R -> ~chi_30 d)
3.81356102E-03 2 1000035 1 # BR(~d_R -> ~chi_40 d)
0.00000000E+00 2 -1000024 2 # BR(~d_R -> ~chi_1- u)
0.00000000E+00 2 -1000037 2 # BR(~d_R -> ~chi_2- u)
0.00000000E+00 2 1000021 1 # BR(~d_R -> ~g d)
#
# PDG Width
DECAY 1000004 5.47719539E+00 # scharm_L decays
# BR NDA ID1 ID2
6.65240987E-03 2 1000022 4 # BR(~c_L -> ~chi_10 c)
3.19051458E-01 2 1000023 4 # BR(~c_L -> ~chi_20 c)
8.44929059E-04 2 1000025 4 # BR(~c_L -> ~chi_30 c)
1.03485173E-02 2 1000035 4 # BR(~c_L -> ~chi_40 c)
6.49499518E-01 2 1000024 3 # BR(~c_L -> ~chi_1+ s)
1.36031676E-02 2 1000037 3 # BR(~c_L -> ~chi_2+ s)
0.00000000E+00 2 1000021 4 # BR(~c_L -> ~g c)
#
# PDG Width
DECAY 2000004 1.15297292E+00 # scharm_R decays
# BR NDA ID1 ID2
9.86377420E-01 2 1000022 4 # BR(~c_R -> ~chi_10 c)
8.46640647E-03 2 1000023 4 # BR(~c_R -> ~chi_20 c)
1.23894695E-03 2 1000025 4 # BR(~c_R -> ~chi_30 c)
3.91722611E-03 2 1000035 4 # BR(~c_R -> ~chi_40 c)
0.00000000E+00 2 1000024 3 # BR(~c_R -> ~chi_1+ s)
0.00000000E+00 2 1000037 3 # BR(~c_R -> ~chi_2+ s)
0.00000000E+00 2 1000021 4 # BR(~c_R -> ~g c)
#
# PDG Width
DECAY 1000003 5.31278772E+00 # sstrange_L decays
# BR NDA ID1 ID2
2.32317969E-02 2 1000022 3 # BR(~s_L -> ~chi_10 s)
3.10235077E-01 2 1000023 3 # BR(~s_L -> ~chi_20 s)
1.52334771E-03 2 1000025 3 # BR(~s_L -> ~chi_30 s)
1.48849798E-02 2 1000035 3 # BR(~s_L -> ~chi_40 s)
6.06452481E-01 2 -1000024 4 # BR(~s_L -> ~chi_1- c)
4.36723179E-02 2 -1000037 4 # BR(~s_L -> ~chi_2- c)
0.00000000E+00 2 1000021 3 # BR(~s_L -> ~g s)
#
# PDG Width
DECAY 2000003 2.85812308E-01 # sstrange_R decays
# BR NDA ID1 ID2
9.86529614E-01 2 1000022 3 # BR(~s_R -> ~chi_10 s)
8.44510350E-03 2 1000023 3 # BR(~s_R -> ~chi_20 s)
1.21172119E-03 2 1000025 3 # BR(~s_R -> ~chi_30 s)
3.81356102E-03 2 1000035 3 # BR(~s_R -> ~chi_40 s)
0.00000000E+00 2 -1000024 4 # BR(~s_R -> ~chi_1- c)
0.00000000E+00 2 -1000037 4 # BR(~s_R -> ~chi_2- c)
0.00000000E+00 2 1000021 3 # BR(~s_R -> ~g s)
#
# PDG Width
DECAY 1000011 2.13682161E-01 # selectron_L decays
# BR NDA ID1 ID2
5.73155386E-01 2 1000022 11 # BR(~e_L -> ~chi_10 e-)
1.64522579E-01 2 1000023 11 # BR(~e_L -> ~chi_20 e-)
0.00000000E+00 2 1000025 11 # BR(~e_L -> ~chi_30 e-)
0.00000000E+00 2 1000035 11 # BR(~e_L -> ~chi_40 e-)
2.62322035E-01 2 -1000024 12 # BR(~e_L -> ~chi_1- nu_e)
0.00000000E+00 2 -1000037 12 # BR(~e_L -> ~chi_2- nu_e)
#
# PDG Width
DECAY 2000011 2.16121626E-01 # selectron_R decays
# BR NDA ID1 ID2
1.00000000E+00 2 1000022 11 # BR(~e_R -> ~chi_10 e-)
0.00000000E+00 2 1000023 11 # BR(~e_R -> ~chi_20 e-)
0.00000000E+00 2 1000025 11 # BR(~e_R -> ~chi_30 e-)
0.00000000E+00 2 1000035 11 # BR(~e_R -> ~chi_40 e-)
0.00000000E+00 2 -1000024 12 # BR(~e_R -> ~chi_1- nu_e)
0.00000000E+00 2 -1000037 12 # BR(~e_R -> ~chi_2- nu_e)
#
# PDG Width
DECAY 1000013 2.13682161E-01 # smuon_L decays
# BR NDA ID1 ID2
5.73155386E-01 2 1000022 13 # BR(~mu_L -> ~chi_10 mu-)
1.64522579E-01 2 1000023 13 # BR(~mu_L -> ~chi_20 mu-)
0.00000000E+00 2 1000025 13 # BR(~mu_L -> ~chi_30 mu-)
0.00000000E+00 2 1000035 13 # BR(~mu_L -> ~chi_40 mu-)
2.62322035E-01 2 -1000024 14 # BR(~mu_L -> ~chi_1- nu_mu)
0.00000000E+00 2 -1000037 14 # BR(~mu_L -> ~chi_2- nu_mu)
#
# PDG Width
DECAY 2000013 2.16121626E-01 # smuon_R decays
# BR NDA ID1 ID2
1.00000000E+00 2 1000022 13 # BR(~mu_R -> ~chi_10 mu-)
0.00000000E+00 2 1000023 13 # BR(~mu_R -> ~chi_20 mu-)
0.00000000E+00 2 1000025 13 # BR(~mu_R -> ~chi_30 mu-)
0.00000000E+00 2 1000035 13 # BR(~mu_R -> ~chi_40 mu-)
0.00000000E+00 2 -1000024 14 # BR(~mu_R -> ~chi_1- nu_mu)
0.00000000E+00 2 -1000037 14 # BR(~mu_R -> ~chi_2- nu_mu)
#
# PDG Width
DECAY 1000015 1.48327268E-01 # stau_1 decays
# BR NDA ID1 ID2
1.00000000E+00 2 1000022 15 # BR(~tau_1 -> ~chi_10 tau-)
0.00000000E+00 2 1000023 15 # BR(~tau_1 -> ~chi_20 tau-)
0.00000000E+00 2 1000025 15 # BR(~tau_1 -> ~chi_30 tau-)
0.00000000E+00 2 1000035 15 # BR(~tau_1 -> ~chi_40 tau-)
0.00000000E+00 2 -1000024 16 # BR(~tau_1 -> ~chi_1- nu_tau)
0.00000000E+00 2 -1000037 16 # BR(~tau_1 -> ~chi_2- nu_tau)
0.00000000E+00 2 1000016 -37 # BR(~tau_1 -> ~nu_tauL H-)
0.00000000E+00 2 1000016 -24 # BR(~tau_1 -> ~nu_tauL W-)
#
# PDG Width
DECAY 2000015 2.69906096E-01 # stau_2 decays
# BR NDA ID1 ID2
5.96653046E-01 2 1000022 15 # BR(~tau_2 -> ~chi_10 tau-)
1.54536760E-01 2 1000023 15 # BR(~tau_2 -> ~chi_20 tau-)
0.00000000E+00 2 1000025 15 # BR(~tau_2 -> ~chi_30 tau-)
0.00000000E+00 2 1000035 15 # BR(~tau_2 -> ~chi_40 tau-)
2.48810195E-01 2 -1000024 16 # BR(~tau_2 -> ~chi_1- nu_tau)
0.00000000E+00 2 -1000037 16 # BR(~tau_2 -> ~chi_2- nu_tau)
0.00000000E+00 2 1000016 -37 # BR(~tau_2 -> ~nu_tauL H-)
0.00000000E+00 2 1000016 -24 # BR(~tau_2 -> ~nu_tauL W-)
0.00000000E+00 2 1000015 25 # BR(~tau_2 -> ~tau_1 h)
0.00000000E+00 2 1000015 35 # BR(~tau_2 -> ~tau_1 H)
0.00000000E+00 2 1000015 36 # BR(~tau_2 -> ~tau_1 A)
0.00000000E+00 2 1000015 23 # BR(~tau_2 -> ~tau_1 Z)
#
# PDG Width
DECAY 1000012 1.49881634E-01 # snu_eL decays
# BR NDA ID1 ID2
9.77700764E-01 2 1000022 12 # BR(~nu_eL -> ~chi_10 nu_e)
8.11554922E-03 2 1000023 12 # BR(~nu_eL -> ~chi_20 nu_e)
0.00000000E+00 2 1000025 12 # BR(~nu_eL -> ~chi_30 nu_e)
0.00000000E+00 2 1000035 12 # BR(~nu_eL -> ~chi_40 nu_e)
1.41836867E-02 2 1000024 11 # BR(~nu_eL -> ~chi_1+ e-)
0.00000000E+00 2 1000037 11 # BR(~nu_eL -> ~chi_2+ e-)
#
# PDG Width
DECAY 1000014 1.49881634E-01 # snu_muL decays
# BR NDA ID1 ID2
9.77700764E-01 2 1000022 14 # BR(~nu_muL -> ~chi_10 nu_mu)
8.11554922E-03 2 1000023 14 # BR(~nu_muL -> ~chi_20 nu_mu)
0.00000000E+00 2 1000025 14 # BR(~nu_muL -> ~chi_30 nu_mu)
0.00000000E+00 2 1000035 14 # BR(~nu_muL -> ~chi_40 nu_mu)
1.41836867E-02 2 1000024 13 # BR(~nu_muL -> ~chi_1+ mu-)
0.00000000E+00 2 1000037 13 # BR(~nu_muL -> ~chi_2+ mu-)
#
# PDG Width
DECAY 1000016 1.47518977E-01 # snu_tauL decays
# BR NDA ID1 ID2
9.85994529E-01 2 1000022 16 # BR(~nu_tauL -> ~chi_10 nu_tau)
6.25129612E-03 2 1000023 16 # BR(~nu_tauL -> ~chi_20 nu_tau)
0.00000000E+00 2 1000025 16 # BR(~nu_tauL -> ~chi_30 nu_tau)
0.00000000E+00 2 1000035 16 # BR(~nu_tauL -> ~chi_40 nu_tau)
7.75417479E-03 2 1000024 15 # BR(~nu_tauL -> ~chi_1+ tau-)
0.00000000E+00 2 1000037 15 # BR(~nu_tauL -> ~chi_2+ tau-)
0.00000000E+00 2 -1000015 -37 # BR(~nu_tauL -> ~tau_1+ H-)
0.00000000E+00 2 -2000015 -37 # BR(~nu_tauL -> ~tau_2+ H-)
0.00000000E+00 2 -1000015 -24 # BR(~nu_tauL -> ~tau_1+ W-)
0.00000000E+00 2 -2000015 -24 # BR(~nu_tauL -> ~tau_2+ W-)
#
# PDG Width
DECAY 1000024 1.70414503E-02 # chargino1+ decays
# BR NDA ID1 ID2
0.00000000E+00 2 1000002 -1 # BR(~chi_1+ -> ~u_L db)
0.00000000E+00 2 2000002 -1 # BR(~chi_1+ -> ~u_R db)
0.00000000E+00 2 -1000001 2 # BR(~chi_1+ -> ~d_L* u )
0.00000000E+00 2 -2000001 2 # BR(~chi_1+ -> ~d_R* u )
0.00000000E+00 2 1000004 -3 # BR(~chi_1+ -> ~c_L sb)
0.00000000E+00 2 2000004 -3 # BR(~chi_1+ -> ~c_R sb)
0.00000000E+00 2 -1000003 4 # BR(~chi_1+ -> ~s_L* c )
0.00000000E+00 2 -2000003 4 # BR(~chi_1+ -> ~s_R* c )
0.00000000E+00 2 1000006 -5 # BR(~chi_1+ -> ~t_1 bb)
0.00000000E+00 2 2000006 -5 # BR(~chi_1+ -> ~t_2 bb)
0.00000000E+00 2 -1000005 6 # BR(~chi_1+ -> ~b_1* t )
0.00000000E+00 2 -2000005 6 # BR(~chi_1+ -> ~b_2* t )
0.00000000E+00 2 1000012 -11 # BR(~chi_1+ -> ~nu_eL e+ )
0.00000000E+00 2 1000014 -13 # BR(~chi_1+ -> ~nu_muL mu+ )
0.00000000E+00 2 1000016 -15 # BR(~chi_1+ -> ~nu_tau1 tau+)
0.00000000E+00 2 -1000011 12 # BR(~chi_1+ -> ~e_L+ nu_e)
0.00000000E+00 2 -2000011 12 # BR(~chi_1+ -> ~e_R+ nu_e)
0.00000000E+00 2 -1000013 14 # BR(~chi_1+ -> ~mu_L+ nu_mu)
0.00000000E+00 2 -2000013 14 # BR(~chi_1+ -> ~mu_R+ nu_mu)
9.25161117E-01 2 -1000015 16 # BR(~chi_1+ -> ~tau_1+ nu_tau)
0.00000000E+00 2 -2000015 16 # BR(~chi_1+ -> ~tau_2+ nu_tau)
7.48388828E-02 2 1000022 24 # BR(~chi_1+ -> ~chi_10 W+)
0.00000000E+00 2 1000023 24 # BR(~chi_1+ -> ~chi_20 W+)
0.00000000E+00 2 1000025 24 # BR(~chi_1+ -> ~chi_30 W+)
0.00000000E+00 2 1000035 24 # BR(~chi_1+ -> ~chi_40 W+)
0.00000000E+00 2 1000022 37 # BR(~chi_1+ -> ~chi_10 H+)
0.00000000E+00 2 1000023 37 # BR(~chi_1+ -> ~chi_20 H+)
0.00000000E+00 2 1000025 37 # BR(~chi_1+ -> ~chi_30 H+)
0.00000000E+00 2 1000035 37 # BR(~chi_1+ -> ~chi_40 H+)
#
# PDG Width
DECAY 1000037 2.48689510E+00 # chargino2+ decays
# BR NDA ID1 ID2
0.00000000E+00 2 1000002 -1 # BR(~chi_2+ -> ~u_L db)
0.00000000E+00 2 2000002 -1 # BR(~chi_2+ -> ~u_R db)
0.00000000E+00 2 -1000001 2 # BR(~chi_2+ -> ~d_L* u )
0.00000000E+00 2 -2000001 2 # BR(~chi_2+ -> ~d_R* u )
0.00000000E+00 2 1000004 -3 # BR(~chi_2+ -> ~c_L sb)
0.00000000E+00 2 2000004 -3 # BR(~chi_2+ -> ~c_R sb)
0.00000000E+00 2 -1000003 4 # BR(~chi_2+ -> ~s_L* c )
0.00000000E+00 2 -2000003 4 # BR(~chi_2+ -> ~s_R* c )
0.00000000E+00 2 1000006 -5 # BR(~chi_2+ -> ~t_1 bb)
0.00000000E+00 2 2000006 -5 # BR(~chi_2+ -> ~t_2 bb)
0.00000000E+00 2 -1000005 6 # BR(~chi_2+ -> ~b_1* t )
0.00000000E+00 2 -2000005 6 # BR(~chi_2+ -> ~b_2* t )
2.00968837E-02 2 1000012 -11 # BR(~chi_2+ -> ~nu_eL e+ )
2.00968837E-02 2 1000014 -13 # BR(~chi_2+ -> ~nu_muL mu+ )
2.74507395E-02 2 1000016 -15 # BR(~chi_2+ -> ~nu_tau1 tau+)
5.20406111E-02 2 -1000011 12 # BR(~chi_2+ -> ~e_L+ nu_e)
0.00000000E+00 2 -2000011 12 # BR(~chi_2+ -> ~e_R+ nu_e)
5.20406111E-02 2 -1000013 14 # BR(~chi_2+ -> ~mu_L+ nu_mu)
0.00000000E+00 2 -2000013 14 # BR(~chi_2+ -> ~mu_R+ nu_mu)
2.82859898E-04 2 -1000015 16 # BR(~chi_2+ -> ~tau_1+ nu_tau)
5.66729336E-02 2 -2000015 16 # BR(~chi_2+ -> ~tau_2+ nu_tau)
2.31513269E-01 2 1000024 23 # BR(~chi_2+ -> ~chi_1+ Z )
6.76715120E-02 2 1000022 24 # BR(~chi_2+ -> ~chi_10 W+)
2.93654849E-01 2 1000023 24 # BR(~chi_2+ -> ~chi_20 W+)
0.00000000E+00 2 1000025 24 # BR(~chi_2+ -> ~chi_30 W+)
0.00000000E+00 2 1000035 24 # BR(~chi_2+ -> ~chi_40 W+)
1.78478848E-01 2 1000024 25 # BR(~chi_2+ -> ~chi_1+ h )
0.00000000E+00 2 1000024 35 # BR(~chi_2+ -> ~chi_1+ H )
0.00000000E+00 2 1000024 36 # BR(~chi_2+ -> ~chi_1+ A )
0.00000000E+00 2 1000022 37 # BR(~chi_2+ -> ~chi_10 H+)
0.00000000E+00 2 1000023 37 # BR(~chi_2+ -> ~chi_20 H+)
0.00000000E+00 2 1000025 37 # BR(~chi_2+ -> ~chi_30 H+)
0.00000000E+00 2 1000035 37 # BR(~chi_2+ -> ~chi_40 H+)
#
# PDG Width
DECAY 1000022 0.00000000E+00 # neutralino1 decays
#
# PDG Width
DECAY 1000023 2.07770048E-02 # neutralino2 decays
# BR NDA ID1 ID2
0.00000000E+00 2 1000022 23 # BR(~chi_20 -> ~chi_10 Z )
0.00000000E+00 2 1000024 -24 # BR(~chi_20 -> ~chi_1+ W-)
0.00000000E+00 2 -1000024 24 # BR(~chi_20 -> ~chi_1- W+)
0.00000000E+00 2 1000037 -24 # BR(~chi_20 -> ~chi_2+ W-)
0.00000000E+00 2 -1000037 24 # BR(~chi_20 -> ~chi_2- W+)
0.00000000E+00 2 1000022 25 # BR(~chi_20 -> ~chi_10 h )
0.00000000E+00 2 1000022 35 # BR(~chi_20 -> ~chi_10 H )
0.00000000E+00 2 1000022 36 # BR(~chi_20 -> ~chi_10 A )
0.00000000E+00 2 1000024 -37 # BR(~chi_20 -> ~chi_1+ H-)
0.00000000E+00 2 -1000024 37 # BR(~chi_20 -> ~chi_1- H+)
0.00000000E+00 2 1000037 -37 # BR(~chi_20 -> ~chi_2+ H-)
0.00000000E+00 2 -1000037 37 # BR(~chi_20 -> ~chi_2- H+)
0.00000000E+00 2 1000002 -2 # BR(~chi_20 -> ~u_L ub)
0.00000000E+00 2 -1000002 2 # BR(~chi_20 -> ~u_L* u )
0.00000000E+00 2 2000002 -2 # BR(~chi_20 -> ~u_R ub)
0.00000000E+00 2 -2000002 2 # BR(~chi_20 -> ~u_R* u )
0.00000000E+00 2 1000001 -1 # BR(~chi_20 -> ~d_L db)
0.00000000E+00 2 -1000001 1 # BR(~chi_20 -> ~d_L* d )
0.00000000E+00 2 2000001 -1 # BR(~chi_20 -> ~d_R db)
0.00000000E+00 2 -2000001 1 # BR(~chi_20 -> ~d_R* d )
0.00000000E+00 2 1000004 -4 # BR(~chi_20 -> ~c_L cb)
0.00000000E+00 2 -1000004 4 # BR(~chi_20 -> ~c_L* c )
0.00000000E+00 2 2000004 -4 # BR(~chi_20 -> ~c_R cb)
0.00000000E+00 2 -2000004 4 # BR(~chi_20 -> ~c_R* c )
0.00000000E+00 2 1000003 -3 # BR(~chi_20 -> ~s_L sb)
0.00000000E+00 2 -1000003 3 # BR(~chi_20 -> ~s_L* s )
0.00000000E+00 2 2000003 -3 # BR(~chi_20 -> ~s_R sb)
0.00000000E+00 2 -2000003 3 # BR(~chi_20 -> ~s_R* s )
0.00000000E+00 2 1000006 -6 # BR(~chi_20 -> ~t_1 tb)
0.00000000E+00 2 -1000006 6 # BR(~chi_20 -> ~t_1* t )
0.00000000E+00 2 2000006 -6 # BR(~chi_20 -> ~t_2 tb)
0.00000000E+00 2 -2000006 6 # BR(~chi_20 -> ~t_2* t )
0.00000000E+00 2 1000005 -5 # BR(~chi_20 -> ~b_1 bb)
0.00000000E+00 2 -1000005 5 # BR(~chi_20 -> ~b_1* b )
0.00000000E+00 2 2000005 -5 # BR(~chi_20 -> ~b_2 bb)
0.00000000E+00 2 -2000005 5 # BR(~chi_20 -> ~b_2* b )
0.00000000E+00 2 1000011 -11 # BR(~chi_20 -> ~e_L- e+)
0.00000000E+00 2 -1000011 11 # BR(~chi_20 -> ~e_L+ e-)
2.95071995E-02 2 2000011 -11 # BR(~chi_20 -> ~e_R- e+)
2.95071995E-02 2 -2000011 11 # BR(~chi_20 -> ~e_R+ e-)
0.00000000E+00 2 1000013 -13 # BR(~chi_20 -> ~mu_L- mu+)
0.00000000E+00 2 -1000013 13 # BR(~chi_20 -> ~mu_L+ mu-)
2.95071995E-02 2 2000013 -13 # BR(~chi_20 -> ~mu_R- mu+)
2.95071995E-02 2 -2000013 13 # BR(~chi_20 -> ~mu_R+ mu-)
4.40985601E-01 2 1000015 -15 # BR(~chi_20 -> ~tau_1- tau+)
4.40985601E-01 2 -1000015 15 # BR(~chi_20 -> ~tau_1+ tau-)
0.00000000E+00 2 2000015 -15 # BR(~chi_20 -> ~tau_2- tau+)
0.00000000E+00 2 -2000015 15 # BR(~chi_20 -> ~tau_2+ tau-)
0.00000000E+00 2 1000012 -12 # BR(~chi_20 -> ~nu_eL nu_eb)
0.00000000E+00 2 -1000012 12 # BR(~chi_20 -> ~nu_eL* nu_e )
0.00000000E+00 2 1000014 -14 # BR(~chi_20 -> ~nu_muL nu_mub)
0.00000000E+00 2 -1000014 14 # BR(~chi_20 -> ~nu_muL* nu_mu )
0.00000000E+00 2 1000016 -16 # BR(~chi_20 -> ~nu_tau1 nu_taub)
0.00000000E+00 2 -1000016 16 # BR(~chi_20 -> ~nu_tau1* nu_tau )
#
# PDG Width
DECAY 1000025 1.91598495E+00 # neutralino3 decays
# BR NDA ID1 ID2
1.13226601E-01 2 1000022 23 # BR(~chi_30 -> ~chi_10 Z )
2.11969194E-01 2 1000023 23 # BR(~chi_30 -> ~chi_20 Z )
2.95329778E-01 2 1000024 -24 # BR(~chi_30 -> ~chi_1+ W-)
2.95329778E-01 2 -1000024 24 # BR(~chi_30 -> ~chi_1- W+)
0.00000000E+00 2 1000037 -24 # BR(~chi_30 -> ~chi_2+ W-)
0.00000000E+00 2 -1000037 24 # BR(~chi_30 -> ~chi_2- W+)
2.13076490E-02 2 1000022 25 # BR(~chi_30 -> ~chi_10 h )
0.00000000E+00 2 1000022 35 # BR(~chi_30 -> ~chi_10 H )
0.00000000E+00 2 1000022 36 # BR(~chi_30 -> ~chi_10 A )
1.24538329E-02 2 1000023 25 # BR(~chi_30 -> ~chi_20 h )
0.00000000E+00 2 1000023 35 # BR(~chi_30 -> ~chi_20 H )
0.00000000E+00 2 1000023 36 # BR(~chi_30 -> ~chi_20 A )
0.00000000E+00 2 1000024 -37 # BR(~chi_30 -> ~chi_1+ H-)
0.00000000E+00 2 -1000024 37 # BR(~chi_30 -> ~chi_1- H+)
0.00000000E+00 2 1000037 -37 # BR(~chi_30 -> ~chi_2+ H-)
0.00000000E+00 2 -1000037 37 # BR(~chi_30 -> ~chi_2- H+)
0.00000000E+00 2 1000002 -2 # BR(~chi_30 -> ~u_L ub)
0.00000000E+00 2 -1000002 2 # BR(~chi_30 -> ~u_L* u )
0.00000000E+00 2 2000002 -2 # BR(~chi_30 -> ~u_R ub)
0.00000000E+00 2 -2000002 2 # BR(~chi_30 -> ~u_R* u )
0.00000000E+00 2 1000001 -1 # BR(~chi_30 -> ~d_L db)
0.00000000E+00 2 -1000001 1 # BR(~chi_30 -> ~d_L* d )
0.00000000E+00 2 2000001 -1 # BR(~chi_30 -> ~d_R db)
0.00000000E+00 2 -2000001 1 # BR(~chi_30 -> ~d_R* d )
0.00000000E+00 2 1000004 -4 # BR(~chi_30 -> ~c_L cb)
0.00000000E+00 2 -1000004 4 # BR(~chi_30 -> ~c_L* c )
     0.00000000E+00    2     2000004        -4   # BR(~chi_30 -> ~c_R  cb)
     0.00000000E+00    2    -2000004         4   # BR(~chi_30 -> ~c_R* c )
0.00000000E+00 2 1000003 -3 # BR(~chi_30 -> ~s_L sb)
0.00000000E+00 2 -1000003 3 # BR(~chi_30 -> ~s_L* s )
0.00000000E+00 2 2000003 -3 # BR(~chi_30 -> ~s_R sb)
0.00000000E+00 2 -2000003 3 # BR(~chi_30 -> ~s_R* s )
0.00000000E+00 2 1000006 -6 # BR(~chi_30 -> ~t_1 tb)
0.00000000E+00 2 -1000006 6 # BR(~chi_30 -> ~t_1* t )
0.00000000E+00 2 2000006 -6 # BR(~chi_30 -> ~t_2 tb)
0.00000000E+00 2 -2000006 6 # BR(~chi_30 -> ~t_2* t )
0.00000000E+00 2 1000005 -5 # BR(~chi_30 -> ~b_1 bb)
0.00000000E+00 2 -1000005 5 # BR(~chi_30 -> ~b_1* b )
0.00000000E+00 2 2000005 -5 # BR(~chi_30 -> ~b_2 bb)
0.00000000E+00 2 -2000005 5 # BR(~chi_30 -> ~b_2* b )
5.57220455E-04 2 1000011 -11 # BR(~chi_30 -> ~e_L- e+)
5.57220455E-04 2 -1000011 11 # BR(~chi_30 -> ~e_L+ e-)
1.25266782E-03 2 2000011 -11 # BR(~chi_30 -> ~e_R- e+)
1.25266782E-03 2 -2000011 11 # BR(~chi_30 -> ~e_R+ e-)
5.57220455E-04 2 1000013 -13 # BR(~chi_30 -> ~mu_L- mu+)
5.57220455E-04 2 -1000013 13 # BR(~chi_30 -> ~mu_L+ mu-)
1.25266782E-03 2 2000013 -13 # BR(~chi_30 -> ~mu_R- mu+)
1.25266782E-03 2 -2000013 13 # BR(~chi_30 -> ~mu_R+ mu-)
5.26279239E-03 2 1000015 -15 # BR(~chi_30 -> ~tau_1- tau+)
5.26279239E-03 2 -1000015 15 # BR(~chi_30 -> ~tau_1+ tau-)
6.72814564E-03 2 2000015 -15 # BR(~chi_30 -> ~tau_2- tau+)
6.72814564E-03 2 -2000015 15 # BR(~chi_30 -> ~tau_2+ tau-)
3.18920485E-03 2 1000012 -12 # BR(~chi_30 -> ~nu_eL nu_eb)
3.18920485E-03 2 -1000012 12 # BR(~chi_30 -> ~nu_eL* nu_e )
3.18920485E-03 2 1000014 -14 # BR(~chi_30 -> ~nu_muL nu_mub)
3.18920485E-03 2 -1000014 14 # BR(~chi_30 -> ~nu_muL* nu_mu )
3.20245934E-03 2 1000016 -16 # BR(~chi_30 -> ~nu_tau1 nu_taub)
3.20245934E-03 2 -1000016 16 # BR(~chi_30 -> ~nu_tau1* nu_tau )
#
# PDG Width
DECAY 1000035 2.58585079E+00 # neutralino4 decays
# BR NDA ID1 ID2
2.15369294E-02 2 1000022 23 # BR(~chi_40 -> ~chi_10 Z )
1.85499971E-02 2 1000023 23 # BR(~chi_40 -> ~chi_20 Z )
0.00000000E+00 2 1000025 23 # BR(~chi_40 -> ~chi_30 Z )
2.49541430E-01 2 1000024 -24 # BR(~chi_40 -> ~chi_1+ W-)
2.49541430E-01 2 -1000024 24 # BR(~chi_40 -> ~chi_1- W+)
0.00000000E+00 2 1000037 -24 # BR(~chi_40 -> ~chi_2+ W-)
0.00000000E+00 2 -1000037 24 # BR(~chi_40 -> ~chi_2- W+)
6.93213268E-02 2 1000022 25 # BR(~chi_40 -> ~chi_10 h )
0.00000000E+00 2 1000022 35 # BR(~chi_40 -> ~chi_10 H )
0.00000000E+00 2 1000022 36 # BR(~chi_40 -> ~chi_10 A )
1.47602336E-01 2 1000023 25 # BR(~chi_40 -> ~chi_20 h )
0.00000000E+00 2 1000023 35 # BR(~chi_40 -> ~chi_20 H )
0.00000000E+00 2 1000023 36 # BR(~chi_40 -> ~chi_20 A )
0.00000000E+00 2 1000025 25 # BR(~chi_40 -> ~chi_30 h )
0.00000000E+00 2 1000025 35 # BR(~chi_40 -> ~chi_30 H )
0.00000000E+00 2 1000025 36 # BR(~chi_40 -> ~chi_30 A )
0.00000000E+00 2 1000024 -37 # BR(~chi_40 -> ~chi_1+ H-)
0.00000000E+00 2 -1000024 37 # BR(~chi_40 -> ~chi_1- H+)
0.00000000E+00 2 1000037 -37 # BR(~chi_40 -> ~chi_2+ H-)
0.00000000E+00 2 -1000037 37 # BR(~chi_40 -> ~chi_2- H+)
0.00000000E+00 2 1000002 -2 # BR(~chi_40 -> ~u_L ub)
0.00000000E+00 2 -1000002 2 # BR(~chi_40 -> ~u_L* u )
0.00000000E+00 2 2000002 -2 # BR(~chi_40 -> ~u_R ub)
0.00000000E+00 2 -2000002 2 # BR(~chi_40 -> ~u_R* u )
0.00000000E+00 2 1000001 -1 # BR(~chi_40 -> ~d_L db)
0.00000000E+00 2 -1000001 1 # BR(~chi_40 -> ~d_L* d )
0.00000000E+00 2 2000001 -1 # BR(~chi_40 -> ~d_R db)
0.00000000E+00 2 -2000001 1 # BR(~chi_40 -> ~d_R* d )
0.00000000E+00 2 1000004 -4 # BR(~chi_40 -> ~c_L cb)
0.00000000E+00 2 -1000004 4 # BR(~chi_40 -> ~c_L* c )
0.00000000E+00 2 2000004 -4 # BR(~chi_40 -> ~c_R cb)
0.00000000E+00 2 -2000004 4 # BR(~chi_40 -> ~c_R* c )
0.00000000E+00 2 1000003 -3 # BR(~chi_40 -> ~s_L sb)
0.00000000E+00 2 -1000003 3 # BR(~chi_40 -> ~s_L* s )
0.00000000E+00 2 2000003 -3 # BR(~chi_40 -> ~s_R sb)
0.00000000E+00 2 -2000003 3 # BR(~chi_40 -> ~s_R* s )
0.00000000E+00 2 1000006 -6 # BR(~chi_40 -> ~t_1 tb)
0.00000000E+00 2 -1000006 6 # BR(~chi_40 -> ~t_1* t )
0.00000000E+00 2 2000006 -6 # BR(~chi_40 -> ~t_2 tb)
0.00000000E+00 2 -2000006 6 # BR(~chi_40 -> ~t_2* t )
0.00000000E+00 2 1000005 -5 # BR(~chi_40 -> ~b_1 bb)
0.00000000E+00 2 -1000005 5 # BR(~chi_40 -> ~b_1* b )
0.00000000E+00 2 2000005 -5 # BR(~chi_40 -> ~b_2 bb)
0.00000000E+00 2 -2000005 5 # BR(~chi_40 -> ~b_2* b )
9.64835418E-03 2 1000011 -11 # BR(~chi_40 -> ~e_L- e+)
9.64835418E-03 2 -1000011 11 # BR(~chi_40 -> ~e_L+ e-)
3.75684470E-03 2 2000011 -11 # BR(~chi_40 -> ~e_R- e+)
3.75684470E-03 2 -2000011 11 # BR(~chi_40 -> ~e_R+ e-)
9.64835418E-03 2 1000013 -13 # BR(~chi_40 -> ~mu_L- mu+)
9.64835418E-03 2 -1000013 13 # BR(~chi_40 -> ~mu_L+ mu-)
3.75684470E-03 2 2000013 -13 # BR(~chi_40 -> ~mu_R- mu+)
3.75684470E-03 2 -2000013 13 # BR(~chi_40 -> ~mu_R+ mu-)
2.68215241E-03 2 1000015 -15 # BR(~chi_40 -> ~tau_1- tau+)
2.68215241E-03 2 -1000015 15 # BR(~chi_40 -> ~tau_1+ tau-)
1.62289809E-02 2 2000015 -15 # BR(~chi_40 -> ~tau_2- tau+)
1.62289809E-02 2 -2000015 15 # BR(~chi_40 -> ~tau_2+ tau-)
2.53796547E-02 2 1000012 -12 # BR(~chi_40 -> ~nu_eL nu_eb)
2.53796547E-02 2 -1000012 12 # BR(~chi_40 -> ~nu_eL* nu_e )
2.53796547E-02 2 1000014 -14 # BR(~chi_40 -> ~nu_muL nu_mub)
2.53796547E-02 2 -1000014 14 # BR(~chi_40 -> ~nu_muL* nu_mu )
2.54724352E-02 2 1000016 -16 # BR(~chi_40 -> ~nu_tau1 nu_taub)
2.54724352E-02 2 -1000016 16 # BR(~chi_40 -> ~nu_tau1* nu_tau )
"""
import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(-1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
SLHATableForPythia8 = cms.string('%s' % SLHA_TABLE),
comEnergy = cms.double(COM_ENERGY),
crossSection = cms.untracked.double(CROSS_SECTION),
maxEventsToPrint = cms.untracked.int32(0),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring('Tune:pp = 5',
'SUSY:all = off',
#'SUSY:gg2squarkantisquark = on',
#'SUSY:qqbar2squarkantisquark= on',
'SUSY:gg2gluinogluino = on',
'SUSY:qqbar2gluinogluino = on',
'RHadrons:allow = on',
'RHadrons:allowDecay = on',
'RHadrons:setMasses = on',
'RHadrons:probGluinoball = 0.1',
'1000021:tau0 = %.1f' % CTAU),
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters')
),
)
ProductionFilterSequence = cms.Sequence(generator)
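# Note (hedged): COM_ENERGY, CROSS_SECTION and CTAU are placeholders expected
# to be defined earlier in this fragment or substituted by a driver script;
# illustratively, COM_ENERGY = 13000., CROSS_SECTION = 1.0 and CTAU = 10.0
# would render the gluino lifetime setting above as '1000021:tau0 = 10.0'.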
| [
"[email protected]"
] | |
1a6f7da3348496cfdcfd72dc5c1d6c662ac408c9 | 4d7e6eaf9c2a4749edd025d5b204289a01e469a2 | /FlaskTest/day_01/carts/__init__.py | d9788c7c7678ef467346e24f97c22e07a933c098 | [
"MIT"
] | permissive | tjhlp/FlaskProject | f5db4a020a5523516624117583aa70183dc0d520 | 2213060ec3ee2720d79a7a3f71fbcaf23a85d64d | refs/heads/master | 2020-06-24T09:33:18.592170 | 2019-08-02T07:57:02 | 2019-08-02T07:57:02 | 198,929,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | from flask import Blueprint
cart_bp = Blueprint('cart', __name__, url_prefix='/cart')
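# views are imported at the bottom so the views module can import cart_bp without a circular import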
from .views import *
| [
"[email protected]"
] | |
2cffed30653acf460e4754cf7749eaf6a5e2e45b | cc0cc5268223f9c80339d1bbc2e499edc828e904 | /wallets/thrifty_wallets/manage.py | e4a50db512daef9656866ea7fe7ac714993b463d | [] | no_license | deone/thrifty | 0ba2b0445e7e9fd4cc378350de158dc6c89838b4 | a0ee4af9447b2765f4139deb87a3c1464e7c7751 | refs/heads/master | 2021-01-10T12:00:00.618968 | 2015-11-01T23:36:36 | 2015-11-01T23:36:36 | 45,340,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "thrifty_wallets.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
1b340ebd2248f63c39e2921394126e7da83f5247 | 8a46f370477ea9fabd36249a4f6d70226917c24b | /blogdown/plugin.py | 8ad745eddeda8d95b2c65f40734315b3b18705c3 | [
"BSD-3-Clause"
] | permissive | blogdown/blogdown | af551991013d03e3b7b033cf45687f952eb41def | 4a463d341a1fe7547a3de33f03d356e74a89569e | refs/heads/master | 2022-06-09T11:53:03.728491 | 2022-05-17T19:26:54 | 2022-05-17T19:28:16 | 5,064,814 | 5 | 4 | null | 2016-03-14T02:44:58 | 2012-07-16T08:30:38 | Python | UTF-8 | Python | false | false | 2,147 | py | # -*- coding: utf-8 -*-
"""
blogdown.plugin
~~~~~~~~~~~~~~~
Utilities for a simple plugin system.
:copyright: (c) 2015 by Thomas Gläßle
:license: BSD, see LICENSE for more details.
"""
import os
from importlib import import_module
from pkg_resources import iter_entry_points
from runpy import run_path
__all__ = [
"EntryPointLoader",
"PathLoader",
"PackageLoader",
"ChainLoader",
]
class EntryPointLoader:
"""Load plugins from specified entrypoint group."""
def __init__(self, ep_group):
self.ep_group = ep_group
def __call__(self, name):
for ep in iter_entry_points(self.ep_group, name):
yield ep.load()
class PathLoader:
"""Load plugins from specified folder."""
def __init__(self, search_path):
self.search_path = os.path.abspath(search_path)
def __call__(self, name):
module_path = os.path.join(self.search_path, name + ".py")
if not os.path.isfile(module_path):
return
module = run_path(module_path)
try:
yield module["setup"]
except KeyError:
raise AttributeError(
"Module at {0!r} can't be used as a plugin, "
"since it has no 'setup' function.".format(module_path)
)
class PackageLoader:
"""Load plugins from specified package."""
def __init__(self, package_name):
self.package_name = package_name
def __call__(self, module_name):
try:
module = import_module(self.package_name + "." + module_name)
except ImportError:
return
try:
yield module.setup
except AttributeError:
raise AttributeError(
"{0!r} can't be used as a plugin, "
"since it has no 'setup' function.".format(module)
)
class ChainLoader:
"""Load plugins from all of the sub-loaders."""
def __init__(self, loaders):
self.loaders = loaders
def __call__(self, name):
for loader in self.loaders:
for plugin in loader(name):
yield plugin
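if __name__ == "__main__":
    # Hedged usage sketch: the entry-point group, folder and package names
    # below are illustrative assumptions, not blogdown's actual configuration.
    loader = ChainLoader(
        [
            EntryPointLoader("blogdown.modules"),
            PathLoader("_plugins"),
            PackageLoader("blogdown.modules"),
        ]
    )
    # each match yielded by the chain is a plugin's 'setup' callable
    for setup in loader("example"):
        print(setup)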
| [
"[email protected]"
] | |
c2b459c2282096b0821f5cafcca9b1d79861dd95 | 9619daf132259c31b31c9e23a15baa675ebc50c3 | /memphis.users/memphis/users/registration.py | 42d0886c2b83d4155d69ef9eca86b59d3b64b673 | [] | no_license | fafhrd91/memphis-dev | ade93c427c1efc374e0e1266382faed2f8e7cd89 | c82aac1ad3a180ff93370b429498dbb1c2e655b8 | refs/heads/master | 2016-09-05T19:32:35.109441 | 2011-08-22T06:30:43 | 2011-08-22T06:30:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 353 | py | from zope import interface
from memphis import controlpanel
from interfaces import _, ISiteRegistration
class SiteRegistration(object):
interface.implements(ISiteRegistration)
controlpanel.registerConfiglet(
'principals.registration', ISiteRegistration, SiteRegistration,
_("Site registration"), _("Site registration configuration."))
| [
"[email protected]"
] | |
82d49a9ea24a6ef56776243ff4a21c12b816e9f6 | eab72229ae04d1160704cbf90a08a582802a739c | /put_zero_den.py | 34666a2ec393a250b458da9b91999832b8c281fe | [
"MIT"
] | permissive | megatazm/Crowd-Counting | 444d39b0e3d6e98995f53badf4c073829038b6b7 | 647a055baccee2c3b6b780f38930e2ffd14d1664 | refs/heads/master | 2022-04-01T04:49:16.409675 | 2020-01-31T21:24:02 | 2020-01-31T21:24:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | import cv2
import numpy as np
import os
import glob
from paint_den import paint
import params
path = params.input
dirs = [f for f in glob.glob(path + '/*/')]
images = []
for x in dirs:
images.append([f for f in glob.glob(x + '/*_pos.png')])
images.sort()
images = [item for sublist in images for item in sublist]
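# Hedged note: the eight integers passed to paint() below are pixel
# coordinates forwarded to paint_den.paint (presumably outlining the region
# to zero out in each density map); their exact roles are defined there.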
for img_path in images:
#paint(img_path, 36, 1785, 393, 75, 567, 60, 951, 1776)
paint(img_path, 0, 3234, 737, 198, 1034, 220, 1617, 3228)
| [
"[email protected]"
] | |
5ef53c9e1394c1d2f92962a9f34217c5c9134413 | 11841e8fb1e44c69ae7e50c0b85b324c4d90abda | /chutu/exmapxx.py | 5a8c550eb45031c938a4fb4f4a1d660bcf2fed3d | [] | no_license | chenlong2019/python | 1d7bf6fb60229221c79538234ad2f1a91bb03c50 | fc9e239754c5715a67cb6d743109800b64d74dc8 | refs/heads/master | 2020-12-08T11:11:49.951752 | 2020-01-10T04:58:29 | 2020-01-10T04:59:50 | 232,968,232 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,386 | py | # coding=utf-8
import arcpy
import os,glob,time
res=200
# template mxd document path, output mxd document path
def createMxd(modelpath,mxdpath,symbologyLayer,jpgpath,string,lyrfile):
mxd=arcpy.mapping.MapDocument(modelpath)
if(os.path.exists(mxdpath)):
mxd=arcpy.mapping.MapDocument(mxdpath)
print("location as "+mxdpath)
        arcpy.AddWarning("The file already exists")
else:
mxd.saveACopy(mxdpath)
        print(mxdpath+" saveAs successful")
if(os.path.exists(mxdpath)):
mxd=arcpy.mapping.MapDocument(mxdpath)
print("location in "+mxdpath)
        # look up the data frame
df = arcpy.mapping.ListDataFrames(mxd, "*")[0]
        # add the base map layer
#symbologyLayer = "D:\\cs\\model\\lyr\\Rectangle_#1_常熟卫图_Level_16.tif.lyr"
#"F:\\xinxiang\\fil\\20190817mydPM25.tif"
rasLayer=arcpy.mapping.Layer(lyrfile)
symbologyLayr=arcpy.mapping.Layer(symbologyLayer)
# rasLayer.symbology.
arcpy.ApplySymbologyFromLayer_management (rasLayer,symbologyLayr)
arcpy.mapping.AddLayer(df, rasLayer, "AUTO_ARRANGE")
        arcpy.AddMessage(str(time.ctime())+":"+symbologyLayer+" added successfully...")
for legend in arcpy.mapping.ListLayoutElements(mxd, "LEGEND_ELEMENT", "Legend"):
print(legend.items)
arcpy.RefreshActiveView()
for legend in arcpy.mapping.ListLayoutElements(mxd, "LEGEND_ELEMENT", "Legend"):
print(legend.items)
mxd.save()
arcpy.mapping.ExportToJPEG(mxd, jpgpath, resolution = res)
if __name__ == '__main__':
rootpath=u'F:\\xx\\中心城区'
pathDir = os.listdir(rootpath)
try:
os.makedirs(u'F:\\xx\\AutoMap\\result\\mxd\\o3')
except:
pass
try:
os.makedirs(u'F:\\xx\\AutoMap\\result\\JpgOutput')
except:
pass
for filename in pathDir:
if filename[-4:].lower() == '.tif':
# o3
if filename[-5:-4].lower() == '3':
try:
filepath=os.path.join(rootpath,filename)
print(filename)
mxdpath=u"F:\\xx\\AutoMap\\result\\mxd\\xinxiang{}.mxd".format(filename[:-4])
modelpath=u"F:\\xx\\AutoMap\\Mxd\\xinxiang_O3.mxd"
                    # mxd template file path
                    #modelpath=arcpy.GetParameterAsText(0)
                    # output mxd file path
                    #mxdpath=arcpy.GetParameterAsText(1)
                    # tif file path
                    symbologyLayer=u'F:\\xx\\Lyr\\C_20191111modo356.lyr'
                    #filepath = "D:\\cs\\data\\pic3"
                    # shp folder path
                    #filepath=arcpy.GetParameterAsText(3)
                    # jpg output path
jpgpath=u"F:\\xx\\AutoMap\\result\\JpgOutput\\{}.jpg".format(filename[:-4])
# jpgpath=arcpy.GetParameterAsText(4)
arcpy.AddMessage('')
                    arcpy.AddMessage(str(time.ctime())+" output started!")
createMxd(modelpath,mxdpath,symbologyLayer,jpgpath,'',filepath)
print('successful')
                    arcpy.AddMessage(str(time.ctime())+" output finished!")
except Exception as e:
print(e.message) | [
"[email protected]"
] | |
10c811755bbeff6b27cebbc77dbe356bb64edc11 | 15ed3ab4510677e6df9b11af8fd7a36fc6d826fc | /v1/og_mc_3/tau=0.01/eta=0.04/library/mc6.py | a573e2553235d58cd70aaa9530cdec9d32c14c5f | [] | no_license | pe-ge/Computational-analysis-of-memory-capacity-in-echo-state-networks | 929347575538de7015190d35a7c2f5f0606235f2 | 85873d8847fb2876cc8a6a2073c2d1779ea1b20b | refs/heads/master | 2020-04-02T08:08:38.595974 | 2018-01-17T08:12:26 | 2018-01-17T08:12:26 | 61,425,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,280 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
mc6.py
Created 21.3.2015
Based on mc5.py
Goal: Measuring Memory Capacity of reservoirs.
Changes:
- removed correlation coefficient correction MC <- MC - q / iterations_coef_measure
- added input-to-output connections
"""
from numpy import random, zeros, tanh, dot, linalg, \
corrcoef, average, std, sqrt, hstack
from library.lyapunov import lyapunov_exp
import scipy.linalg
def memory_capacity(W, WI, memory_max=None,
iterations=1000, iterations_skipped=None, iterations_coef_measure=1000,
runs=1, input_dist=(-1., 1.),
use_input=False, target_later=False, calc_lyapunov=False):
"""Calculates memory capacity of a NN
[given by its input weights WI and reservoir weights W].
W = q x q matrix storing hidden reservoir weights
WI = q x 1 vector storing input weights
Returns: a non-negative real number MC
MC: memory capacity sum for histories 1..MEMORY_MAX
"""
# matrix shape checks
if len(WI.shape) != 1:
raise Exception("input matrix WI must be vector-shaped!")
q, = WI.shape
if W.shape != (q, q):
raise Exception("W and WI matrix sizes do not match")
if memory_max is None:
memory_max = q
if iterations_skipped is None:
iterations_skipped = max(memory_max, 100) + 1
iterations_measured = iterations - iterations_skipped
dist_input = lambda: random.uniform(input_dist[0], input_dist[1], iterations)
# vector initialization
X = zeros(q)
if use_input:
S = zeros([q + 1, iterations_measured])
else:
S = zeros([q, iterations_measured])
# generate random input
u = dist_input() # all input; dimension: [iterations, 1]
    # run the `iterations` steps and fill the state matrix S; D is prepared below
for it in range(iterations):
X = tanh(dot(W, X) + dot(WI, u[it]))
if it >= iterations_skipped:
# record the state of reservoir activations X into S
if use_input:
S[:, it - iterations_skipped] = hstack([X, u[it]])
else:
S[:, it - iterations_skipped] = X
# prepare matrix D of desired values (that is, shifted inputs)
assert memory_max < iterations_skipped
D = zeros([memory_max, iterations_measured])
if target_later:
# if we allow direct input-output connections, there is no point in measuring 0-delay corr. coef. (it is always 1)
for h in range(memory_max):
D[h,:] = u[iterations_skipped - (h+1) : iterations - (h+1)]
else:
for h in range(memory_max):
D[h,:] = u[iterations_skipped - h : iterations - h]
# calculate pseudoinverse S+ and with it, the matrix WO
S_PINV = scipy.linalg.pinv(S)
WO = dot(D, S_PINV)
# do a new run for an unbiased test of quality of our newly trained WO
# we skip memory_max iterations to have large enough window
MC = zeros([runs, memory_max]) # here we store memory capacity
LE = zeros(runs) # lyapunov exponent
for run in range(runs):
u = random.uniform(input_dist[0], input_dist[1], iterations_coef_measure + memory_max)
X = zeros(q)
o = zeros([memory_max, iterations_coef_measure]) # 200 x 1000
for it in range(iterations_coef_measure + memory_max):
X = tanh(dot(W, X) + dot(WI, u[it]))
if it >= memory_max:
# we calculate output nodes using WO
if use_input:
o[:, it - memory_max] = dot(WO, hstack([X, u[it]]))
else:
o[:, it - memory_max] = dot(WO, X)
# correlate outputs with inputs (shifted)
for h in range(memory_max):
k = h + 1
if target_later:
cc = corrcoef(u[memory_max - k : memory_max + iterations_coef_measure - k], o[h, : ]) [0, 1]
else:
cc = corrcoef(u[memory_max - h : memory_max + iterations_coef_measure - h], o[h, : ]) [0, 1]
MC[run, h] = cc * cc
# calculate lyapunov
if calc_lyapunov:
LE[run] = lyapunov_exp(W, WI, X)
return sum(average(MC, axis=0)), LE
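def example_run():
    # Hedged usage sketch: the reservoir size and weight scales below are
    # illustrative choices (a common echo-state heuristic), not values
    # prescribed by this module.
    q = 25
    W = random.uniform(-1., 1., [q, q]) * (0.9 / sqrt(q))  # reservoir weights
    WI = random.uniform(-0.1, 0.1, q)                      # input weights
    mc, _ = memory_capacity(W, WI, memory_max=q)
    print("memory capacity: {:.3f} (at most {})".format(mc, q))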
def main():
print("I am a library. Please don't run me directly.")
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
16e0ae410ab9c5056f793ef00a29456e3926cbfc | 3b9bf497cd29cea9c24462e0411fa8adbfa6ba60 | /leetcode/Problems/116--Populating-Next-Right-Pointers-in-Each-Node-Medium.py | 2e8b81530cf65226d4d6de3352b0c75892188c4a | [] | no_license | niteesh2268/coding-prepation | 918823cb7f4965bec096ec476c639a06a9dd9692 | 19be0766f6b9c298fb32754f66416f79567843c1 | refs/heads/master | 2023-01-02T05:30:59.662890 | 2020-10-17T13:12:34 | 2020-10-17T13:12:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | """
# Definition for a Node.
class Node:
def __init__(self, val: int = 0, left: 'Node' = None, right: 'Node' = None, next: 'Node' = None):
self.val = val
self.left = left
self.right = right
self.next = next
"""
class Solution:
def connect(self, root: 'Node') -> 'Node':
if not root:
return None
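        # assign(r1, r2) links r1.next -> r2 for two adjacent nodes at one
        # depth, then recurses over the sibling pairs under r1 and r2 and the
        # boundary pair (r1.right, r2.left); a provisional None link set at a
        # subtree's right edge is overwritten later by the pair to its right,
        # so the call order below matters.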
def assign(r1, r2):
if not r1:
return
r1.next = r2
assign(r1.left, r1.right)
if r2:
assign(r1.right, r2.left)
assign(r2.left, r2.right)
assign(r2.right, None)
assign(root.left, root.right)
return root | [
"[email protected]"
] | |
fb3ec15864cfb1866c1711d0586b7d7b0fff7090 | ad13583673551857615498b9605d9dcab63bb2c3 | /output/models/ms_data/particles/particles_ie013_xsd/__init__.py | e0ffd1abcba9a881fbd645379ab76771f0c5d955 | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 169 | py | from output.models.ms_data.particles.particles_ie013_xsd.particles_ie013 import (
Base,
Doc,
Testing,
)
__all__ = [
"Base",
"Doc",
"Testing",
]
| [
"[email protected]"
] | |
0cd0e4e8ac5f482d0c574c61b50f82a0ddd477af | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-1/f30589c91d8946586faff2c994e99395239bd50b-<main>-fix.py | 1d51b1b9cb0c17280c516c955697eab9c96e41df | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 935 | py |
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(name=dict(required=True), eradicate=dict(default='false', type='bool'), state=dict(default='present', choices=['present', 'absent']), size=dict()))
required_if = [('state', 'present', ['size'])]
module = AnsibleModule(argument_spec, required_if=required_if, supports_check_mode=True)
if (not HAS_PURESTORAGE):
module.fail_json(msg='purestorage sdk is required for this module in volume')
state = module.params['state']
array = get_system(module)
volume = get_volume(module, array)
if ((state == 'present') and (not volume)):
create_volume(module, array)
elif ((state == 'present') and volume):
update_volume(module, array)
elif ((state == 'absent') and volume):
delete_volume(module, array)
elif ((state == 'absent') and (not volume)):
module.exit_json(changed=False)
| [
"[email protected]"
] | |
550323588bb7c91d6f193aa3a636c51d6a3b730e | 07f7e1296e528e83d570ee7f5c75ff83e331d949 | /cufacesearch/cufacesearch/api/api.py | 0a7465d9ef4781c7538c68ae161aa902f996b4e7 | [
"Apache-2.0"
] | permissive | wuqixiaobai/ColumbiaImageSearch | e0ab1ed8ab9724b70838085a37c3cd06638e93b2 | a4c4816174c522c844b08feb1c9ddcad5ca2f6db | refs/heads/master | 2020-03-08T10:38:13.669538 | 2018-03-22T19:56:34 | 2018-03-22T19:56:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,861 | py | import sys
import time
import json
from datetime import datetime
from flask import Markup, flash, request, render_template, make_response
from flask_restful import Resource
from ..imgio.imgio import ImageMIMETypes, get_SHA1_img_type_from_B64, get_SHA1_img_info_from_buffer, buffer_to_B64
from ..detector.utils import build_bbox_str_list
from socket import *
sock = socket()
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
global_searcher = None
global_start_time = None
input_type = "image"
class APIResponder(Resource):
def __init__(self):
self.searcher = global_searcher
self.start_time = global_start_time
self.input_type = input_type
# This could be loaded from config?
self.default_no_blur = True
self.default_max_height = 120
# how to blur canvas images but keep the face clean?
self.valid_options = ["near_dup", "near_dup_th", "no_blur", "detect_only", "max_height", "max_returned"]
def get(self, mode):
query = request.args.get('data')
#query = unicode(request.args.get('data'), "utf8")
options = request.args.get('options')
if query:
print "[get] received parameters: {}".format(request.args.keys())
print "[get] received data: "+query.encode('ascii','ignore')
print "[get] received options: {}".format(options)
return self.process_query(mode, query, options)
else:
return self.process_mode(mode)
def put(self, mode):
return self.put_post(mode)
def post(self, mode):
return self.put_post(mode)
def put_post(self, mode):
print("[put/post] received parameters: {}".format(request.form.keys()))
print("[put/post] received request: {}".format(request))
query = request.form['data']
try:
options = request.form['options']
except:
options = None
print("[put/post] received data of length: {}".format(len(query)))
print("[put/post] received options: {}".format(options))
if not query:
return {'error': 'no data received'}
else:
return self.process_query(mode, query, options)
def process_mode(self, mode):
if mode == "status":
return self.status()
elif mode == "refresh":
return self.refresh()
else:
return {'error': 'unknown_mode: '+str(mode)+'. Did you forget to give \'data\' parameter?'}
def process_query(self, mode, query, options=None):
start = time.time()
if mode == "byURL":
resp = self.search_byURL(query, options)
elif mode == "bySHA1":
resp = self.search_bySHA1(query, options)
elif mode == "byPATH":
resp = self.search_byPATH(query, options)
elif mode == "byB64":
resp = self.search_byB64(query, options)
elif mode == "view_image_sha1":
return self.view_image_sha1(query, options)
elif mode == "view_similar_byURL":
query_reponse = self.search_byURL(query, options)
return self.view_similar_query_response('URL', query, query_reponse, options)
elif mode == "view_similar_byB64":
query_reponse = self.search_byB64(query, options)
return self.view_similar_query_response('B64', query, query_reponse, options)
elif mode == "view_similar_byPATH":
query_reponse = self.search_byPATH(query, options)
return self.view_similar_query_response('PATH', query, query_reponse, options)
elif mode == "view_similar_bySHA1":
query_reponse = self.search_bySHA1(query, options)
return self.view_similar_query_response('SHA1', query, query_reponse, options)
# elif mode == "byURL_nocache":
# resp = self.search_byURL_nocache(query, options)
# elif mode == "bySHA1_nocache":
# resp = self.search_bySHA1_nocache(query, options)
# elif mode == "byB64_nocache":
# resp = self.search_byB64_nocache(query, options)
else:
return {'error': 'unknown_mode: '+str(mode)}
resp['Timing'] = time.time()-start
return resp
def get_options_dict(self, options):
errors = []
options_dict = dict()
if options:
try:
options_dict = json.loads(options)
except Exception as inst:
err_msg = "[get_options: error] Could not load options from: {}. {}".format(options, inst)
print(err_msg)
errors.append(err_msg)
for k in options_dict:
if k not in self.valid_options:
err_msg = "[get_options: error] Unknown option {}".format(k)
print(err_msg)
errors.append(err_msg)
return options_dict, errors
def append_errors(self, outp, errors=[]):
if errors:
e_d = dict()
if 'errors' in outp:
e_d = outp['errors']
for i,e in enumerate(errors):
e_d['error_{}'.format(i)] = e
outp['errors'] = e_d
return outp
def search_byURL(self, query, options=None):
query_urls = self.get_clean_urls_from_query(query)
options_dict, errors = self.get_options_dict(options)
#outp = self.searcher.search_image_list(query_urls, options_dict)
outp = self.searcher.search_imageURL_list(query_urls, options_dict)
outp_we = self.append_errors(outp, errors)
sys.stdout.flush()
return outp_we
def search_byPATH(self, query, options=None):
query_paths = query.split(',')
options_dict, errors = self.get_options_dict(options)
outp = self.searcher.search_image_path_list(query_paths, options_dict)
outp_we = self.append_errors(outp, errors)
sys.stdout.flush()
return outp_we
def search_bySHA1(self, query, options=None):
query_sha1s = query.split(',')
options_dict, errors = self.get_options_dict(options)
# get the image URLs/paths from HBase and search
# TODO: should we actually try to get features?
rows_imgs = self.searcher.indexer.get_columns_from_sha1_rows(query_sha1s, columns=[self.searcher.img_column])
    # TODO: what should we do if we get fewer rows_imgs than query_sha1s?
query_imgs = [row[1][self.searcher.img_column] for row in rows_imgs]
if self.searcher.file_input:
outp = self.searcher.search_image_path_list(query_imgs, options_dict)
else:
outp = self.searcher.search_imageURL_list(query_imgs, options_dict)
outp_we = self.append_errors(outp, errors)
sys.stdout.flush()
return outp_we
def search_byB64(self, query, options=None):
query_b64s = [str(x) for x in query.split(',') if not x.startswith('data:')]
options_dict, errors = self.get_options_dict(options)
outp = self.searcher.search_imageB64_list(query_b64s, options_dict)
outp_we = self.append_errors(outp, errors)
sys.stdout.flush()
return outp_we
def refresh(self):
# Force check if new images are available in HBase
# Could be called if data needs to be as up-to-date as it can be...
if self.searcher:
self.searcher.load_codes(full_refresh=True)
    return {'refresh': 'just ran a full refresh'}
def status(self):
# prepare output
status_dict = {'status': 'OK'}
status_dict['API_start_time'] = self.start_time.isoformat(' ')
status_dict['API_uptime'] = str(datetime.now()-self.start_time)
# Try to refresh on status call but at most every 4 hours
if self.searcher.last_refresh:
last_refresh_time = self.searcher.last_refresh
else:
last_refresh_time = self.searcher.indexer.last_refresh
diff_time = datetime.now()-last_refresh_time
if self.searcher and diff_time.total_seconds() > 3600*4:
self.searcher.load_codes()
last_refresh_time = self.searcher.last_refresh
status_dict['last_refresh_time'] = last_refresh_time.isoformat(' ')
status_dict['nb_indexed'] = str(self.searcher.searcher.get_nb_indexed())
return status_dict
#TODO: Deal with muliple query images with an array parameter request.form.getlist(key)
@staticmethod
def get_clean_urls_from_query(query):
""" To deal with comma in URLs.
"""
# tmp_query_urls = ['http'+str(x) for x in query.split('http') if x]
# fix issue with unicode in URL
from ..common.dl import fixurl
tmp_query_urls = [fixurl('http' + x) for x in query.split('http') if x]
query_urls = []
for x in tmp_query_urls:
if x[-1] == ',':
query_urls.append(x[:-1])
else:
query_urls.append(x)
print "[get_clean_urls_from_query: info] {}".format(query_urls)
return query_urls
def get_image_str(self, row):
return "<img src=\"{}\" title=\"{}\" class=\"img_blur\">".format(row[1]["info:s3_url"],row[0])
def view_image_sha1(self, query, options=None):
# Not really used anymore...
query_sha1s = [str(x) for x in query.split(',')]
rows = self.searcher.indexer.get_columns_from_sha1_rows(query_sha1s, ["info:s3_url"])
images_str = ""
# TODO: change this to actually just produce a list of images to fill a new template
for row in rows:
images_str += self.get_image_str(row)
images = Markup(images_str)
flash(images)
headers = {'Content-Type': 'text/html'}
return make_response(render_template('view_images.html'),200,headers)
def view_similar_query_response(self, query_type, query, query_response, options=None):
if query_type == 'B64':
# get :
# - sha1 to be able to map to query response
# - image type to make sure the image is displayed properly
# - embedded format for each b64 query
# TODO: use array parameter
query_list = query.split(',')
query_b64_infos = [get_SHA1_img_type_from_B64(q) for q in query_list if not q.startswith('data:')]
query_urls_map = dict()
for img_id, img_info in enumerate(query_b64_infos):
query_urls_map[img_info[0]] = "data:"+ImageMIMETypes[img_info[1]]+";base64,"+str(query_list[img_id])
elif query_type == "PATH" or (query_type == "SHA1" and self.searcher.file_input):
# Encode query in B64
query_infos = []
query_list = query.split(',')
# Get images paths from sha1s
if query_type == 'SHA1' and self.searcher.file_input:
rows_imgs = self.searcher.indexer.get_columns_from_sha1_rows(query_list, columns=[self.searcher.img_column])
query_list = [row[1][self.searcher.img_column] for row in rows_imgs]
query_list_B64 = []
for q in query_list:
with open(q,'rb') as img_buffer:
query_infos.append(get_SHA1_img_info_from_buffer(img_buffer))
query_list_B64.append(buffer_to_B64(img_buffer))
query_urls_map = dict()
for img_id, img_info in enumerate(query_infos):
query_urls_map[img_info[0]] = "data:" + ImageMIMETypes[img_info[1]] + ";base64," + str(query_list_B64[img_id])
elif query_type == "URL" or (query_type == "SHA1" and not self.searcher.file_input):
# URLs should already be in query response
pass
else:
print "[view_similar_query_response: error] Unknown query_type: {}".format(query_type)
return None
# Get errors
options_dict, errors_options = self.get_options_dict(options)
# Parse similar faces response
all_sim_faces = query_response[self.searcher.do.map['all_similar_'+self.input_type+'s']]
search_results = []
print "[view_similar_query_response: log] len(sim_images): {}".format(len(all_sim_faces))
for i in range(len(all_sim_faces)):
# Parse query face, and build face tuple (sha1, url/b64 img, face bounding box)
query_face = all_sim_faces[i]
#print "query_face [{}]: {}".format(query_face.keys(), query_face)
sys.stdout.flush()
query_sha1 = query_face[self.searcher.do.map['query_sha1']]
if query_type == "B64" or query_type == "PATH" or (query_type == "SHA1" and self.searcher.file_input):
query_face_img = query_urls_map[query_sha1]
else:
query_face_img = query_face[self.searcher.do.map['query_url']].decode("utf8")
#query_face_img = query_face[self.searcher.do.map['query_url']]
if self.searcher.do.map['query_'+self.input_type] in query_face:
query_face_bbox = query_face[self.searcher.do.map['query_'+self.input_type]]
query_face_bbox_compstr = build_bbox_str_list(query_face_bbox)
else:
query_face_bbox_compstr = []
img_size = None
if self.searcher.do.map['img_info'] in query_face:
img_size = query_face[self.searcher.do.map['img_info']][1:]
out_query_face = (query_sha1, query_face_img, query_face_bbox_compstr, img_size)
# Parse similar faces
similar_faces = query_face[self.searcher.do.map['similar_'+self.input_type+'s']]
#print similar_faces[self.searcher.do.map['number_faces']]
out_similar_faces = []
for j in range(similar_faces[self.searcher.do.map['number_'+self.input_type+'s']]):
# build face tuple (sha1, url/b64 img, face bounding box, distance) for one similar face
osface_sha1 = similar_faces[self.searcher.do.map['image_sha1s']][j]
#if query_type == "PATH":
if self.searcher.file_input:
with open(similar_faces[self.searcher.do.map['cached_image_urls']][j], 'rb') as img_buffer:
img_info = get_SHA1_img_info_from_buffer(img_buffer)
img_B64 = buffer_to_B64(img_buffer)
osface_url = "data:" + ImageMIMETypes[img_info[1]] + ";base64," + str(img_B64)
else:
osface_url = similar_faces[self.searcher.do.map['cached_image_urls']][j]
osface_bbox_compstr = None
if self.input_type != "image":
osface_bbox = similar_faces[self.searcher.do.map[self.input_type+'s']][j]
osface_bbox_compstr = build_bbox_str_list(osface_bbox)
osface_img_size = None
if self.searcher.do.map['img_info'] in similar_faces:
osface_img_size = similar_faces[self.searcher.do.map['img_info']][j][1:]
osface_dist = similar_faces[self.searcher.do.map['distances']][j]
out_similar_faces.append((osface_sha1, osface_url, osface_bbox_compstr, osface_dist, osface_img_size))
# build output
search_results.append((out_query_face, [out_similar_faces]))
# Prepare settings
settings = dict()
settings["no_blur"] = self.default_no_blur
settings["max_height"] = self.default_max_height
if "no_blur" in options_dict:
settings["no_blur"] = options_dict["no_blur"]
if "max_height" in options_dict:
settings["max_height"] = options_dict["max_height"]
headers = {'Content-Type': 'text/html'}
#print search_results
sys.stdout.flush()
if self.input_type != "image":
return make_response(render_template('view_similar_faces_wbbox.html',
settings=settings,
search_results=search_results),
200, headers)
else:
return make_response(render_template('view_similar_images.html',
settings=settings,
search_results=search_results),
200, headers)
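if __name__ == "__main__":
  # Hedged client sketch: host, port and route depend on how this Resource is
  # registered with Flask-RESTful, so the URL below is an assumption; the
  # option keys come from valid_options above.
  import requests
  r = requests.get("http://localhost:5000/byURL",
                   params={"data": "http://example.com/image.jpg",
                           "options": '{"max_returned": 5, "no_blur": true}'})
  print(r.status_code)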
| [
"[email protected]"
] | |
25b0faff57a134389ac668ba40d1d3421f140816 | 08ee36e0bb1c250f7f2dfda12c1a73d1984cd2bc | /src/mnistk/networks/conv1dtanh_24.py | c8888a08e65cd985171dd5b7947bf12cd3c0dedf | [] | no_license | ahgamut/mnistk | 58dadffad204602d425b18549e9b3d245dbf5486 | 19a661185e6d82996624fc6fcc03de7ad9213eb0 | refs/heads/master | 2021-11-04T07:36:07.394100 | 2021-10-27T18:37:12 | 2021-10-27T18:37:12 | 227,103,881 | 2 | 1 | null | 2020-02-19T22:07:24 | 2019-12-10T11:33:09 | Python | UTF-8 | Python | false | false | 1,688 | py | # -*- coding: utf-8 -*-
"""
conv1dtanh_24.py
:copyright: (c) 2019 by Gautham Venkatasubramanian.
:license: MIT
"""
import torch
from torch import nn
class Conv1dTanh_24(nn.Module):
def __init__(self):
nn.Module.__init__(self)
self.f0 = nn.Conv1d(in_channels=16, out_channels=22, kernel_size=(11,), stride=(1,), padding=(0,), dilation=(1,), groups=1, bias=True, padding_mode='zeros')
self.f1 = nn.Conv1d(in_channels=22, out_channels=16, kernel_size=(38,), stride=(1,), padding=(0,), dilation=(1,), groups=1, bias=False, padding_mode='zeros')
self.f2 = nn.Conv1d(in_channels=16, out_channels=27, kernel_size=(2,), stride=(1,), padding=(0,), dilation=(1,), groups=1, bias=False, padding_mode='zeros')
self.f3 = nn.Tanh()
self.f4 = nn.Conv1d(in_channels=27, out_channels=35, kernel_size=(1,), stride=(1,), padding=(0,), dilation=(1,), groups=1, bias=False, padding_mode='zeros')
self.f5 = nn.Tanh()
self.f6 = nn.Conv1d(in_channels=35, out_channels=30, kernel_size=(1,), stride=(1,), padding=(0,), dilation=(1,), groups=1, bias=False, padding_mode='zeros')
self.f7 = nn.Conv1d(in_channels=30, out_channels=10, kernel_size=(1,), stride=(1,), padding=(0,), dilation=(1,), groups=1, bias=False, padding_mode='zeros')
self.f8 = nn.LogSoftmax(dim=1)
def forward(self, *inputs):
x = inputs[0]
x = x.view(x.shape[0],16,49)
x = self.f0(x)
x = self.f1(x)
x = self.f2(x)
x = self.f3(x)
x = self.f4(x)
x = self.f5(x)
x = self.f6(x)
x = self.f7(x)
x = x.view(x.shape[0],10)
x = self.f8(x)
return x
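if __name__ == "__main__":
    # Hedged smoke test: a batch of 2 flattened 28x28 inputs (784 = 16*49,
    # matching the view() in forward); the values are random placeholders.
    net = Conv1dTanh_24()
    out = net(torch.randn(2, 784))
    print(out.shape)  # expected: torch.Size([2, 10])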
| [
"[email protected]"
] | |
20223d251cf7c1ee244f3ff6bda6aeac1170471e | 02842943a8e8c5c53f5f8146234271446f1203ce | /102_morphological_analysis.py | eb9ada5fd4c6b12d0915f447c3a6585661eacd1e | [
"CC0-1.0"
] | permissive | utda/portal_keyword | e38856747bdd413519fe249a2bf4a7c49011bc37 | b83b5a70e766235361ec34e5d5d45610d649c248 | refs/heads/master | 2022-12-12T07:03:34.552994 | 2020-06-12T08:55:56 | 2020-06-12T08:55:56 | 252,589,741 | 0 | 0 | CC0-1.0 | 2022-09-30T19:00:11 | 2020-04-02T23:48:40 | Python | UTF-8 | Python | false | false | 1,672 | py |
# text-mining.py
# import the Python morphological analyzer janome - 1
from janome.tokenizer import Tokenizer
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
import json
import os
import requests
import configparser
import numpy as np
import glob
import csv
import os.path
# create the tokenizer object for morphological analysis - 2
text = Tokenizer()
idir = "data/text"
odir = "data/ma"
os.makedirs(odir, exist_ok=True)
files = glob.glob(idir+'/*.txt')
for i in range(len(files)):
if i % 100 == 0:
print(i+1, len(files))
file = files[i]
output = file.replace(idir, odir)
    # read the data from the txt file - 3
    with open(file) as text_file:
        txt = text_file.read()
    # process the text line by line - 5
word_dic = {}
lines_1 = txt.split("\r\n")
for line in lines_1:
malist = text.tokenize(line)
for w in malist:
word = w.surface
            ps = w.part_of_speech # part of speech - 6
if ps.find("名詞") < 0:
                continue # count only nouns ("名詞") - 7
if not word.isalpha():
continue
if not word in word_dic:
word_dic[word] = 0
word_dic[word] += 1
if "『" in word:
print(word)
    # output the most frequently used words - 8
keys = sorted(word_dic.items(), key=lambda x: x[1], reverse=True)
f2 = open(output, 'w')
writer = csv.writer(f2, lineterminator='\n')
writer.writerow(["word", "cnt"])
for word, cnt in keys:
writer.writerow([word, cnt])
f2.close()
| [
"[email protected]"
] | |
69b79f560be12c0e9e42677a4b97215c43d4af93 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_cuneiform.py | aa5197e96b7bd8efc91b06c79ac4112f74a72e7c | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 523 | py |
# class header
class _CUNEIFORM():
def __init__(self,):
self.name = "CUNEIFORM"
self.definitions = [u'of a form of writing used for over 3,000 years until the 1st century BC in the ancient countries of Western Asia', u'pointed at one end and wide at the other: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adjectives'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
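if __name__ == "__main__":
    # Hedged usage sketch: the two arguments are illustrative; run() stores
    # this word's lowercased name under obj2's 'properties' key.
    w = _CUNEIFORM()
    print(w.run("obj1", "obj2"))  # {'obj2': {'properties': 'cuneiform'}}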
| [
"[email protected]"
] | |
c695fddcefdc0efae0816934bae5aaba3b17ab7c | 54ddb3f38cd09ac25213a7eb8743376fe778fee8 | /topic_02_syntax/hw/tests/logic_1_arithmetic_test.py | 24a92c5b8bec9f07cd079054c5fbfa6afd539e1c | [] | no_license | ryndovaira/leveluppythonlevel1_300321 | dbfd4ee41485870097ee490f652751776ccbd7ab | 0877226e6fdb8945531775c42193a90ddb9c8a8b | refs/heads/master | 2023-06-06T07:44:15.157913 | 2021-06-18T11:53:35 | 2021-06-18T11:53:35 | 376,595,962 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,327 | py | import pytest
from topic_02_syntax.hw.logic_1_arithmetic import arithmetic
params = [
(0, 0, '+', 0),
(1, 0, '+', 1),
(0, 1, '+', 1),
(100, 100, '+', 200),
(100, -100, '+', 0),
(-100, 100, '+', 0),
(-100, -100, '+', -200),
(0, 0, '-', 0),
(1, 0, '-', 1),
(0, 1, '-', -1),
(100, 100, '-', 0),
(100, -100, '-', 200),
(-100, 100, '-', -200),
(-100, -100, '-', 0),
(0, 0, '*', 0),
(1, 0, '*', 0),
(0, 1, '*', 0),
(100, 100, '*', 10000),
(100, -100, '*', -10000),
(-100, 100, '*', -10000),
(-100, -100, '*', 10000),
(0, 1, '/', 0),
(1, 1, '/', 1),
(100, 100, '/', 1),
(100, -100, '/', -1),
(-100, 100, '/', -1),
(-100, -100, '/', 1),
(0, 1, '=', "Unknown operator"),
(1, 1, '%', "Unknown operator"),
(100, 100, '#', "Unknown operator"),
(100, -100, '.', "Unknown operator"),
(-100, 100, '0', "Unknown operator"),
(-100, -100, '&', "Unknown operator"),
]
ids = ["(%s) %s (%s) == (%s)" % (num1, op, num2, expected) for (num1, num2, op, expected) in params]
@pytest.mark.parametrize(argnames="num1, num2, op, expected",
argvalues=params,
ids=ids)
def test_arithmetic(num1, num2, op, expected):
assert arithmetic(num1, num2, op) == expected
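# Run just this parametrized module with verbose test ids, e.g.:
#   pytest topic_02_syntax/hw/tests/logic_1_arithmetic_test.py -v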
| [
"[email protected]"
] | |
cd7ca0848790ab8b6fa8f0a2dca430f44d1e1aea | 362224f8a23387e8b369b02a6ff8690c200a2bce | /django/django_orm/courses/courses_app/migrations/0004_auto_20210507_1257.py | 44b3c5750ec2e09c2a574516f4e4ef23d781992c | [] | no_license | Helenyixuanwang/python_stack | ac94c7c532655bf47592a8453738daac10f220ad | 97fbc77e3971b5df1fe3e79652b294facf8d6cee | refs/heads/main | 2023-06-11T02:17:27.277551 | 2021-06-21T17:01:09 | 2021-06-21T17:01:09 | 364,336,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | # Generated by Django 2.2 on 2021-05-07 19:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('courses_app', '0003_auto_20210507_1107'),
]
operations = [
migrations.RemoveField(
model_name='description',
name='course',
),
migrations.AddField(
model_name='course',
name='description',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='course', to='courses_app.Description'),
),
]
| [
"[email protected]"
] | |
7ee6dfd65f6902adeb07ab3e77ae072964561905 | b15d2787a1eeb56dfa700480364337216d2b1eb9 | /samples/cli/accelbyte_py_sdk_cli/iam/_admin_get_list_country_age_restriction_v3.py | 74d2f077e0a0ec3dedd98d7a8e75cccd7aeadc41 | [
"MIT"
] | permissive | AccelByte/accelbyte-python-sdk | dedf3b8a592beef5fcf86b4245678ee3277f953d | 539c617c7e6938892fa49f95585b2a45c97a59e0 | refs/heads/main | 2023-08-24T14:38:04.370340 | 2023-08-22T01:08:03 | 2023-08-22T01:08:03 | 410,735,805 | 2 | 1 | MIT | 2022-08-02T03:54:11 | 2021-09-27T04:00:10 | Python | UTF-8 | Python | false | false | 2,385 | py | # Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template_file: python-cli-command.j2
# AGS Iam Service (6.2.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
import json
import yaml
from typing import Optional
import click
from .._utils import login_as as login_as_internal
from .._utils import to_dict
from accelbyte_py_sdk.api.iam import (
admin_get_list_country_age_restriction_v3 as admin_get_list_country_age_restriction_v3_internal,
)
from accelbyte_py_sdk.api.iam.models import ModelCountryV3Response
from accelbyte_py_sdk.api.iam.models import RestErrorResponse
@click.command()
@click.option("--namespace", type=str)
@click.option("--login_as", type=click.Choice(["client", "user"], case_sensitive=False))
@click.option("--login_with_auth", type=str)
@click.option("--doc", type=bool)
def admin_get_list_country_age_restriction_v3(
namespace: Optional[str] = None,
login_as: Optional[str] = None,
login_with_auth: Optional[str] = None,
doc: Optional[bool] = None,
):
if doc:
click.echo(admin_get_list_country_age_restriction_v3_internal.__doc__)
return
x_additional_headers = None
if login_with_auth:
x_additional_headers = {"Authorization": login_with_auth}
else:
login_as_internal(login_as)
result, error = admin_get_list_country_age_restriction_v3_internal(
namespace=namespace,
x_additional_headers=x_additional_headers,
)
if error:
raise Exception(f"AdminGetListCountryAgeRestrictionV3 failed: {str(error)}")
click.echo(yaml.safe_dump(to_dict(result), sort_keys=False))
admin_get_list_country_age_restriction_v3.operation_id = (
"AdminGetListCountryAgeRestrictionV3"
)
admin_get_list_country_age_restriction_v3.is_deprecated = False
| [
"[email protected]"
] |