blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d3770de2ae95f5979154c27ae9ccd77472d0e4d2
|
0a6f284b1a7c8b16911ebf33076abc38778c752f
|
/app/run.py
|
37911045108a9bf98a86cd405ae50114df2a13ca
|
[
"Apache-2.0"
] |
permissive
|
branky/blockd3
|
2298b3eafd1b9c50b0374dd1456c0fcdf2068fab
|
27e78fd89f44af95ad65b1203c02156db64333d0
|
refs/heads/master
| 2020-12-25T09:00:35.047437 | 2012-11-19T06:08:49 | 2012-11-19T06:08:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,026 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import logging
from ghost import GhostTestCase, Ghost
from app import make_app
app = make_app("test")
PORT = 5000
base_url = "http://localhost:%s/dist/" % PORT
class Blockd3GhostTest(GhostTestCase):
    """Browser-driven smoke tests for the blockd3 app, run through Ghost."""
    port = PORT
    display = False
    log_level = logging.INFO
    def __new__(cls, *args, **kwargs):
        """Create one shared Ghost instance, lazily, on first instantiation."""
        if not hasattr(cls, 'ghost'):
            ghost_options = dict(
                display=cls.display,
                wait_timeout=10,
                viewport_size=cls.viewport_size,
                log_level=cls.log_level,
            )
            cls.ghost = Ghost(**ghost_options)
        return super(Blockd3GhostTest, cls).__new__(cls, *args, **kwargs)
    @classmethod
    def create_app(cls):
        """Return the application under test."""
        return app
    def test_open(self):
        """The dist page loads at the expected URL and #run is clickable."""
        opened_page, _resources = self.ghost.open(base_url)
        self.assertEqual(opened_page.url, base_url)
        self.ghost.click("#run")
# Run the suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
|
[
"[email protected]"
] | |
28c05a44ba70abe18d6362f2f5149765c73adee1
|
4a4a24bf9521ef659d16fb08403242a77a9b9d77
|
/aos_l10n_id/models/localization.py
|
697c48b804e8e16763168c1459d8a44355bd4266
|
[] |
no_license
|
hassanfadl/Odoo12-1
|
601c4969c9d483590e8481e92ecaf4dddaac3847
|
bb057424138f99d0a645d185fbd26648385fbdf7
|
refs/heads/main
| 2023-07-31T22:59:19.597624 | 2021-10-01T06:35:58 | 2021-10-01T06:35:58 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,822 |
py
|
##############################################################################
#
# Copyright (C) 2011 ADSOFT OpenERP Partner Indonesia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import api, fields, models, _
#from openerp import api, fields, models, _
import logging
_logger = logging.getLogger(__name__)
# try:
# import phonenumbers
# except Exception as e:
# _logger.warning(
# 'Import Error for phonenumbers, you will not be able to validate phone number.\n'
# 'Consider Installing phonenumbers or dependencies: https://pypi.python.org/pypi/phonenumbers/7.2.6.')
# raise e
class res_country_state(models.Model):
    """Extend provinces (res.country.state) with their regencies."""
    _inherit = "res.country.state"
    #name = fields.Char(string='Province')
    # Regencies/cities (kabupaten) located in this province.
    kabupaten_line = fields.One2many('res.kabupaten', 'state_id', string='Kabupaten')
class ResKabupaten(models.Model):
    """Indonesian regency/city (kabupaten), grouped under a province."""
    _name = "res.kabupaten"
    _description = "List Kabupaten"
    name = fields.Char(string='Kabupaten')
    state_id = fields.Many2one('res.country.state', string="Province")
    kecamatan_line = fields.One2many('res.kecamatan', 'kabupaten_id', string='Kecamatan')
    @api.model
    def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
        """Narrow the search domain to the province preselected in the context.

        Operates on a copy of ``args`` so the caller's domain list is never
        mutated as a side effect (the previous ``args +=`` modified it in
        place).
        """
        args = list(args)
        if self._context.get('search_default_province'):
            args.append(('state_id', '=', self._context['search_default_province']))
        return super(ResKabupaten, self)._search(args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid)
class ResKecamatan(models.Model):
    """Indonesian district (kecamatan), below a regency and province."""
    _name = "res.kecamatan"
    _description = "List Kecamatan"
    name = fields.Char(string='Kecamatan')
    state_id = fields.Many2one('res.country.state', string="Province")
    kabupaten_id = fields.Many2one('res.kabupaten', string="Kabupaten")
    kelurahan_line = fields.One2many('res.kelurahan', 'kecamatan_id', string='Kelurahan')
    @api.model
    def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
        """Narrow the domain using regency/province defaults from the context.

        Works on a copy of ``args`` instead of mutating the caller's domain
        list in place (the previous ``args +=`` modified it).
        """
        args = list(args)
        if self._context.get('search_default_kabupaten'):
            args.append(('kabupaten_id', '=', self._context['search_default_kabupaten']))
        if self._context.get('search_default_province'):
            args.append(('state_id', '=', self._context['search_default_province']))
        return super(ResKecamatan, self)._search(args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid)
class ResKelurahan(models.Model):
    """Indonesian village/urban ward (kelurahan), with its postal code."""
    _name = "res.kelurahan"
    _description = "List Kelurahan"
    name = fields.Char(string='Kelurahan')
    state_id = fields.Many2one('res.country.state', string="Province")
    kabupaten_id = fields.Many2one('res.kabupaten', string="Kabupaten")
    kecamatan_id = fields.Many2one('res.kecamatan', string="Kecamatan")
    # NOTE: the field name shadows the ``zip`` builtin but is part of the
    # stored model schema, so it must not be renamed.
    zip = fields.Char("Kode Post")
    @api.model
    def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
        """Narrow the domain using zip/district/regency/province context defaults.

        Works on a copy of ``args`` instead of mutating the caller's domain
        list in place (the previous ``args +=`` modified it).
        """
        args = list(args)
        if self._context.get('search_default_zip'):
            args.append(('zip', '=', self._context['search_default_zip']))
        if self._context.get('search_default_kecamatan'):
            args.append(('kecamatan_id', '=', self._context['search_default_kecamatan']))
        if self._context.get('search_default_kabupaten'):
            args.append(('kabupaten_id', '=', self._context['search_default_kabupaten']))
        if self._context.get('search_default_province'):
            args.append(('state_id', '=', self._context['search_default_province']))
        return super(ResKelurahan, self)._search(args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid)
class res_race(models.Model):
    """Reference list of ethnic groups (RAS/Suku)."""
    _name = "res.race"
    _description = "List RAS/Suku"
    name = fields.Char(string='RAS', required=True , translate=True)
class res_religion(models.Model):
    """Reference list of religions (Agama)."""
    _name = "res.religion"
    _description = "List Agama"
    name = fields.Char(string='Religion', required=True , translate=True)
|
[
"[email protected]"
] | |
8728e365e7d7eb7024f6524d63406cd1993322f7
|
4b1d977acfde9354685157e02459c016c041421d
|
/tests/test_molecules.py
|
88a58a7272d50cddca1b5caf2d88a8175dd2b880
|
[] |
no_license
|
fujiisoup/pyspectra
|
f6c50d929e992ab6064ef978a4de0c0647ff3d4b
|
152bf37dee7e9eeabf42d24496566022d00d31ec
|
refs/heads/master
| 2023-07-25T08:23:13.637233 | 2023-07-05T16:32:30 | 2023-07-05T16:32:30 | 246,492,492 | 3 | 0 | null | 2023-07-05T16:32:32 | 2020-03-11T06:28:08 |
Python
|
UTF-8
|
Python
| false | false | 2,363 |
py
|
import numpy as np
from pyspectra import molecules, units, refractive_index, data
def test_level():
    """Check the H2 Fulcher-band line position against its known value."""
    constants = data.diatomic_molecules("H2").sel(state="X 1Σg 1sσ2")
    for name in constants:
        print(name, constants[name].item())
    energy_gap = (molecules.level("H2", "d 3Πu 3pπ", 0, 1)
                  - molecules.level("H2", "a 3Σg+ 2sσ", 0, 1))
    vacuum_nm = units.eV_to_nm(energy_gap)
    wavelength = refractive_index.vacuum_to_air(vacuum_nm)
    print(wavelength, vacuum_nm)
    assert np.allclose(601.8299, wavelength, atol=2e-3, rtol=0)
def test_OH_X2():
    """
    randomly choose levels from Table 27
    """
    # Quantum numbers per row: v, J, parity, spin component (1 -> 3/2, 2 -> 1/2).
    qnums = [
        # [0, 0.5, +1, 1] (F1e) and [0, 0.5, -1, 1] (F1f) are the 0.0000 origins
        [0, 0.5, +1, 2],   # F2e
        [0, 0.5, -1, 2],   # F2f
        [0, 1.5, +1, 1],   # F1e
        [0, 1.5, +1, 2],   # F2e
        [0, 1.5, -1, 1],   # F1f
        [0, 1.5, -1, 2],   # F2f
        [0, 10.5, +1, 1],  # F1e
        [0, 10.5, +1, 2],  # F2e
        [0, 10.5, -1, 1],  # F1f
        [0, 10.5, -1, 2],  # F2f
    ]
    levels = [
        88.1066,
        88.2642,
        -38.2480,
        149.3063,
        -38.1926,
        149.5662,
        1976.8000,
        2414.9290,
        1981.4015,
        2412.0731,
    ]
    v, J, parity, spin = np.array(qnums).T
    energies = molecules.level_OH_X2(v, J, parity, spin)
    assert np.allclose(energies, levels, atol=0.1)
    # Higher vibrational state, high J.
    qnums = [
        [4, 13.5, +1, 1],  # F1e
        [4, 13.5, +1, 2],  # F2e
        [4, 13.5, -1, 1],  # F1f
        [4, 13.5, -1, 2],  # F2f
    ]
    levels = [
        16062.2776,
        16522.0293,
        16068.1260,
        16517.9751,
    ]
    v, J, parity, spin = np.array(qnums).T
    energies = molecules.level_OH_X2(v, J, parity, spin)
    assert np.allclose(energies, levels, atol=0.1)
|
[
"[email protected]"
] | |
2bdc663042e1e1aefc99f900694814b55def8c35
|
83de24182a7af33c43ee340b57755e73275149ae
|
/aliyun-python-sdk-arms/aliyunsdkarms/request/v20190808/SearchEventsRequest.py
|
3fae22f10c2789e7944a1a6c990d8133f134697a
|
[
"Apache-2.0"
] |
permissive
|
aliyun/aliyun-openapi-python-sdk
|
4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f
|
83fd547946fd6772cf26f338d9653f4316c81d3c
|
refs/heads/master
| 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 |
NOASSERTION
| 2023-09-14T08:51:06 | 2015-07-23T09:39:45 |
Python
|
UTF-8
|
Python
| false | false | 2,864 |
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkarms.endpoint import endpoint_data
class SearchEventsRequest(RpcRequest):
	"""RPC request for the ARMS ``SearchEvents`` operation (API 2019-08-08).

	Generated SDK boilerplate: each getter/setter pair wraps one query
	parameter; the trailing comment on each def notes the wire type.
	"""
	def __init__(self):
		RpcRequest.__init__(self, 'ARMS', '2019-08-08', 'SearchEvents','arms')
		self.set_method('POST')
		# Endpoint data is attached only if the installed core SDK exposes
		# these attributes on the base request.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
	def get_IsTrigger(self): # Integer
		return self.get_query_params().get('IsTrigger')
	def set_IsTrigger(self, IsTrigger): # Integer
		self.add_query_param('IsTrigger', IsTrigger)
	def get_AppType(self): # String
		return self.get_query_params().get('AppType')
	def set_AppType(self, AppType): # String
		self.add_query_param('AppType', AppType)
	def get_EndTime(self): # Long
		return self.get_query_params().get('EndTime')
	def set_EndTime(self, EndTime): # Long
		self.add_query_param('EndTime', EndTime)
	def get_Pid(self): # String
		return self.get_query_params().get('Pid')
	def set_Pid(self, Pid): # String
		self.add_query_param('Pid', Pid)
	def get_CurrentPage(self): # Integer
		return self.get_query_params().get('CurrentPage')
	def set_CurrentPage(self, CurrentPage): # Integer
		self.add_query_param('CurrentPage', CurrentPage)
	def get_StartTime(self): # Long
		return self.get_query_params().get('StartTime')
	def set_StartTime(self, StartTime): # Long
		self.add_query_param('StartTime', StartTime)
	def get_AlertType(self): # Integer
		return self.get_query_params().get('AlertType')
	def set_AlertType(self, AlertType): # Integer
		self.add_query_param('AlertType', AlertType)
	def get_PageSize(self): # Integer
		return self.get_query_params().get('PageSize')
	def set_PageSize(self, PageSize): # Integer
		self.add_query_param('PageSize', PageSize)
	def get_AlertId(self): # Long
		return self.get_query_params().get('AlertId')
	def set_AlertId(self, AlertId): # Long
		self.add_query_param('AlertId', AlertId)
|
[
"[email protected]"
] | |
9b1a9ff5d3d3ad9d0086cc8d179cdb717f6b6bde
|
6fcfb638fa725b6d21083ec54e3609fc1b287d9e
|
/python/yadayada_acd_cli/acd_cli-master/acdcli/utils/conf.py
|
9ebf249680620acffb9a172fb3c9591bb51f646c
|
[] |
no_license
|
LiuFang816/SALSTM_py_data
|
6db258e51858aeff14af38898fef715b46980ac1
|
d494b3041069d377d6a7a9c296a14334f2fa5acc
|
refs/heads/master
| 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 |
Python
|
UTF-8
|
Python
| false | false | 584 |
py
|
import configparser
import logging
import os
logger = logging.getLogger(__name__)


def get_conf(path, filename, default_conf: configparser.ConfigParser) \
        -> configparser.ConfigParser:
    """Load an INI file and merge it over *default_conf*.

    Args:
        path: directory expected to contain the configuration file
        filename: configuration file name inside *path*
        default_conf: parser providing fallback values; it is not modified

    Returns:
        A new ConfigParser seeded with the defaults and overridden by any
        values read from the file. A missing or unreadable file is not an
        error: the defaults are returned unchanged.
    """
    conf = configparser.ConfigParser()
    conf.read_dict(default_conf)
    conffn = os.path.join(path, filename)
    try:
        with open(conffn) as cf:
            conf.read_file(cf)
    except OSError:
        # Best effort: an absent/unreadable user config means defaults apply.
        pass
    # Lazy %-style logging arguments: the message is only formatted when
    # DEBUG logging is actually enabled (the original formatted eagerly).
    logger.debug('configuration resulting from merging default and %s: %s',
                 filename,
                 {section: dict(conf[section]) for section in conf})
    return conf
|
[
"[email protected]"
] | |
f09e09b066b83eb93839703b12f7fe62adf4b05a
|
8be3fbe41873b5682eed4da3aab93be657a893bc
|
/nested_admin/tests/three_deep/tests.py
|
a25c53f636b475f837708e24585fcaee22e597d2
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
theKomix/django-nested-admin
|
0b5f10b88928dc3167a720cf9a36f2ffe428cba7
|
2bfed729ba17bc69e4fe98d4a672b6b34186ae0f
|
refs/heads/master
| 2020-03-30T12:03:51.430420 | 2018-12-26T05:24:04 | 2018-12-26T05:24:04 | 151,206,354 | 0 | 1 |
NOASSERTION
| 2018-12-07T15:11:31 | 2018-10-02T05:49:00 |
Python
|
UTF-8
|
Python
| false | false | 4,893 |
py
|
from nested_admin.tests.base import BaseNestedAdminTestCase
from .models import TopLevel, LevelOne, LevelTwo, LevelThree
class TestDeepNesting(BaseNestedAdminTestCase):
    """Selenium admin tests for three-level-deep nested inline forms."""
    root_model = TopLevel
    nested_models = (LevelOne, LevelTwo, LevelThree)
    @classmethod
    def setUpClass(cls):
        # Expose each nested model class as a convenience attribute.
        super(TestDeepNesting, cls).setUpClass()
        cls.l1_model, cls.l2_model, cls.l3_model = cls.nested_models
    def test_validationerror_on_empty_extra_parent_form(self):
        # Filling only the grandchild forms must flag the skipped child form.
        toplevel = TopLevel.objects.create(name='a')
        self.load_admin(toplevel)
        self.set_field('name', 'c', indexes=[0, 0])
        self.set_field('name', 'd', indexes=[0, 0, 0])
        self.save_form()
        field_id_with_error = self.selenium.execute_script(
            "return $('ul.errorlist li').closest('.form-row').find('input').attr('id')")
        self.assertEqual(field_id_with_error, "id_children-0-name")
    def test_create_new(self):
        # Fill one object per nesting level via the default extra forms.
        self.load_admin()
        self.set_field('name', 'a')
        self.set_field('name', 'b', [0])
        self.set_field('name', 'c', [0, 0])
        self.set_field('name', 'd', [0, 0, 0])
        self.save_form()
        # Verify exactly one instance per level, each with the expected name.
        root_instances = self.root_model.objects.all()
        self.assertNotEqual(len(root_instances), 0, "%s did not save" % self.root_model.__name__)
        self.assertEqual(len(root_instances), 1, "Too many %s found" % self.root_model.__name__)
        root = root_instances[0]
        self.assertEqual(root.name, 'a', "%s.name has wrong value" % self.root_model.__name__)
        l1_instances = root.children.all()
        self.assertNotEqual(len(l1_instances), 0, "%s did not save" % self.l1_model.__name__)
        self.assertEqual(len(l1_instances), 1, "Too many %s found" % self.l1_model.__name__)
        l1_instance = l1_instances[0]
        self.assertEqual(l1_instance.name, 'b', "%s.name has wrong value" % self.l1_model.__name__)
        l2_instances = l1_instance.children.all()
        self.assertNotEqual(len(l2_instances), 0, "%s did not save" % self.l2_model.__name__)
        self.assertEqual(len(l2_instances), 1, "Too many %s found" % self.l2_model.__name__)
        l2_instance = l2_instances[0]
        self.assertEqual(l2_instance.name, 'c', "%s.name has wrong value" % self.l2_model.__name__)
        l3_instances = l2_instance.children.all()
        self.assertNotEqual(len(l3_instances), 0, "%s did not save" % self.l3_model.__name__)
        self.assertEqual(len(l3_instances), 1, "Too many %s found" % self.l3_model.__name__)
        l3_instance = l3_instances[0]
        self.assertEqual(l3_instance.name, 'd', "%s.name has wrong value" % self.l3_model.__name__)
    def test_create_new_no_extras(self):
        # Same as test_create_new, but removing each default extra form and
        # re-adding a fresh inline at every level.
        self.load_admin()
        self.set_field('name', 'a')
        self.remove_inline([0])
        self.add_inline(name='b')
        self.remove_inline([0, 0])
        self.add_inline([0], name='c')
        self.remove_inline([0, 0, 0])
        self.add_inline([0, 0], name='d')
        self.save_form()
        root_instances = self.root_model.objects.all()
        self.assertNotEqual(len(root_instances), 0, "%s did not save" % self.root_model.__name__)
        self.assertEqual(len(root_instances), 1, "Too many %s found" % self.root_model.__name__)
        root = root_instances[0]
        self.assertEqual(root.name, 'a', "%s.name has wrong value" % self.root_model.__name__)
        l1_instances = root.children.all()
        self.assertNotEqual(len(l1_instances), 0, "%s did not save" % self.l1_model.__name__)
        self.assertEqual(len(l1_instances), 1, "Too many %s found" % self.l1_model.__name__)
        l1_instance = l1_instances[0]
        self.assertEqual(l1_instance.name, 'b', "%s.name has wrong value" % self.l1_model.__name__)
        l2_instances = l1_instance.children.all()
        self.assertNotEqual(len(l2_instances), 0, "%s did not save" % self.l2_model.__name__)
        self.assertEqual(len(l2_instances), 1, "Too many %s found" % self.l2_model.__name__)
        l2_instance = l2_instances[0]
        self.assertEqual(l2_instance.name, 'c', "%s.name has wrong value" % self.l2_model.__name__)
        l3_instances = l2_instance.children.all()
        self.assertNotEqual(len(l3_instances), 0, "%s did not save" % self.l3_model.__name__)
        self.assertEqual(len(l3_instances), 1, "Too many %s found" % self.l3_model.__name__)
        l3_instance = l3_instances[0]
        self.assertEqual(l3_instance.name, 'd', "%s.name has wrong value" % self.l3_model.__name__)
    def test_save_missing_intermediate_inline(self):
        # Grandchild data entered without its intermediate child level.
        # NOTE(review): this method appears truncated in this excerpt;
        # further assertions may follow outside the visible range.
        self.load_admin()
        self.set_field('name', 'a')
        self.set_field('name', 'b', [0])
        self.set_field('name', 'd', [0, 0, 0])
        self.save_form()
        root_instances = self.root_model.objects.all()
        self.assertNotEqual(len(root_instances), 0, "%s did not save" % self.root_model.__name__)
|
[
"[email protected]"
] | |
cb3b6dee35a6278db9f968b94e96589d790b669c
|
699a43917ce75b2026a450f67d85731a0f719e01
|
/comonprefix/venv/Scripts/pip3.7-script.py
|
e913bfc5f695c5d68e2ba38d50ad1f005852ef42
|
[] |
no_license
|
wusanshou2017/Leetcode
|
96ab81ae38d6e04739c071acfc0a5f46a1c9620b
|
c4b85ca0e23700b84e4a8a3a426ab634dba0fa88
|
refs/heads/master
| 2021-11-16T01:18:27.886085 | 2021-10-14T09:54:47 | 2021-10-14T09:54:47 | 107,402,187 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 396 |
py
|
#!E:\lc\comonprefix\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.7'
# Auto-generated setuptools console-script wrapper; not meant to be edited.
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the '-script.py'/'.exe' wrapper suffix so pip sees a clean argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.7')()
    )
|
[
"[email protected]"
] | |
20acc266a70d5447f23a333ff82231fd7cc9eac7
|
9edaf93c833ba90ae9a903aa3c44c407a7e55198
|
/netex/models/version_type_enumeration.py
|
0e80ef2e2f270f90f4bcec2a491a896e7d6de716
|
[] |
no_license
|
tefra/xsdata-samples
|
c50aab4828b8c7c4448dbdab9c67d1ebc519e292
|
ef027fe02e6a075d8ed676c86a80e9647d944571
|
refs/heads/main
| 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 |
Python
|
UTF-8
|
Python
| false | false | 155 |
py
|
from enum import Enum
__NAMESPACE__ = "http://www.netex.org.uk/netex"
class VersionTypeEnumeration(Enum):
    """Allowed values of the NeTEx ``VersionTypeEnumeration`` schema type."""
    POINT = "point"
    BASELINE = "baseline"
|
[
"[email protected]"
] | |
ec9afabfdd6a3fb5b54dcd3df3f3f3a0b67ae01e
|
a76790fa5f4eb96a8b731f891ca1aa4c16d21256
|
/azext_iot/dps/providers/discovery.py
|
433c7fe8409c6128ccc6fbaf4f22840408eae3da
|
[
"MIT"
] |
permissive
|
digimaun/azure-iot-cli-extension
|
414fb1c7c22b0f0d0891cd30c28d13366b9f7207
|
9999c536bbf67251d863d365c190866e1d5cc1ad
|
refs/heads/dev
| 2023-06-24T09:42:51.069627 | 2022-12-14T23:29:58 | 2022-12-14T23:29:58 | 579,177,610 | 1 | 0 |
NOASSERTION
| 2022-12-16T21:25:31 | 2022-12-16T21:25:31 | null |
UTF-8
|
Python
| false | false | 3,899 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.log import get_logger
from azure.cli.core.commands.client_factory import get_subscription_id
from azext_iot.common._azure import IOT_SERVICE_CS_TEMPLATE
from azext_iot.common.base_discovery import BaseDiscovery
from azext_iot.common.shared import DiscoveryResourceType
from azext_iot.common.utility import ensure_iotdps_sdk_min_version
from azext_iot.constants import IOTDPS_TRACK_2_SDK_MIN_VERSION
from azext_iot.dps.models.dps_target import DPSTarget
from azext_iot._factory import iot_service_provisioning_factory
from typing import Any, Dict
logger = get_logger(__name__)
PRIVILEDGED_ACCESS_RIGHTS_SET = set(
["ServiceConfig", "EnrollmentWrite"]
)
class DPSDiscovery(BaseDiscovery):
    """Resource and policy discovery for Azure IoT Device Provisioning
    Services (DPS), built on the shared BaseDiscovery machinery.
    """
    def __init__(self, cmd):
        super().__init__(
            cmd=cmd,
            necessary_rights_set=PRIVILEDGED_ACCESS_RIGHTS_SET,
            resource_type=DiscoveryResourceType.DPS.value
        )
    def _initialize_client(self):
        """Lazily build the DPS management client (and subscription id)."""
        if not self.client:
            # Track 2 could be supported
            self.track2 = ensure_iotdps_sdk_min_version(IOTDPS_TRACK_2_SDK_MIN_VERSION)
            if getattr(self.cmd, "cli_ctx", None):
                # The client we want to use is an attribute of the client returned
                # from the factory. This will have to be revisited if the DPS sdk changes.
                self.client = iot_service_provisioning_factory(self.cmd.cli_ctx).iot_dps_resource
                self.sub_id = get_subscription_id(self.cmd.cli_ctx)
            else:
                # No CLI context: assume ``cmd`` itself is a usable client.
                self.client = self.cmd
            # Method get_keys_for_key_name is needed for policy discovery (see
            # BaseDiscovery.find_policy for usage) and is defined as
            # list_keys_for_key_name in the DPS Sdk.
            self.client.get_keys_for_key_name = self.client.list_keys_for_key_name
    def _make_kwargs(self, **kwargs) -> Dict[str, Any]:
        """Rename ``resource_name`` to the kwarg the DPS client expects."""
        # The DPS client needs the provisioning_service_name argument
        kwargs["provisioning_service_name"] = kwargs.pop("resource_name")
        return kwargs
    @classmethod
    def get_target_by_cstring(cls, connection_string: str) -> DPSTarget:
        """Build a target dict directly from a DPS connection string."""
        return DPSTarget.from_connection_string(cstring=connection_string).as_dict()
    def _build_target(
        self, resource, policy, key_type: str = None, **kwargs
    ) -> Dict[str, str]:
        """Assemble the legacy target dict for a DPS resource and policy."""
        # This is more or less a compatibility function which produces the
        # same result as _azure.get_iot_dps_connection_string()
        # In future iteration we will return a 'Target' object rather than dict
        # but that will be better served aligning with vNext pattern for DPS
        result = {}
        result["cs"] = IOT_SERVICE_CS_TEMPLATE.format(
            resource.properties.service_operations_host_name,
            policy.key_name,
            policy.primary_key if key_type == "primary" else policy.secondary_key,
        )
        result["entity"] = resource.properties.service_operations_host_name
        result["policy"] = policy.key_name
        result["primarykey"] = policy.primary_key
        result["secondarykey"] = policy.secondary_key
        result["subscription"] = self.sub_id
        result["cmd"] = self.cmd
        result["idscope"] = resource.properties.id_scope
        return result
    def get_id_scope(self, resource_name: str, rg: str = None) -> str:
        """Get the ID scope. Only needed for certain DPS operations."""
        return self.find_resource(
            resource_name=resource_name, rg=rg
        ).properties.id_scope
|
[
"[email protected]"
] | |
b2e416b830f8a762c57a51d0493a629a1344ef3f
|
255e19ddc1bcde0d3d4fe70e01cec9bb724979c9
|
/dockerized-gists/7c04cc141bd3fc5f0ce1/snippet.py
|
4a77c7cc3e4f198f1906ec927652c704233af5b1
|
[
"MIT"
] |
permissive
|
gistable/gistable
|
26c1e909928ec463026811f69b61619b62f14721
|
665d39a2bd82543d5196555f0801ef8fd4a3ee48
|
refs/heads/master
| 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 |
Python
|
UTF-8
|
Python
| false | false | 2,740 |
py
|
def _download(host, creds, fp):
    """Download a software image from a BIG-IP using Content-Range chunks.

    Args:
        host: BIG-IP IP address or hostname.
        creds: (username, password) tuple for HTTP basic auth.
        fp: destination file path; its basename names the remote image.

    NOTE(review): relies on ``os`` and ``requests`` being imported by the
    enclosing module (here they are imported inside the __main__ block).
    """
    chunk_size = 512 * 1024
    headers = {
        'Content-Type': 'application/octet-stream'
    }
    filename = os.path.basename(fp)
    uri = 'https://%s/mgmt/cm/autodeploy/software-image-downloads/%s' % (host, filename)
    requests.packages.urllib3.disable_warnings()
    with open(fp, 'wb') as f:
        # 'start'/'end' are inclusive byte offsets; 'size' holds total-1
        # once known (0 means not yet discovered from the first response).
        start = 0
        end = chunk_size - 1
        size = 0
        current_bytes = 0
        while True:
            content_range = "%s-%s/%s" % (start, end, size)
            headers['Content-Range'] = content_range
            #print headers
            # verify=False: BIG-IP management certs are typically self-signed.
            resp = requests.get(uri,
                                auth=creds,
                                headers=headers,
                                verify=False,
                                stream=True)
            if resp.status_code == 200:
                # If the size is zero, then this is the first time through the
                # loop and we don't want to write data because we haven't yet
                # figured out the total size of the file.
                if size > 0:
                    current_bytes += chunk_size
                    for chunk in resp.iter_content(chunk_size):
                        f.write(chunk)
                # Once we've downloaded the entire file, we can break out of
                # the loop
                if end == size:
                    break
            crange = resp.headers['Content-Range']
            # Determine the total number of bytes to read
            if size == 0:
                size = int(crange.split('/')[-1]) - 1
                # If the file is smaller than the chunk size, BIG-IP will
                # return an HTTP 400. So adjust the chunk_size down to the
                # total file size...
                if chunk_size > size:
                    end = size
                # ...and pass on the rest of the code
                continue
            start += chunk_size
            if (current_bytes + chunk_size) > size:
                end = size
            else:
                end = start + chunk_size - 1
if __name__ == "__main__":
    # CLI entry point. NOTE: Python 2 syntax (print statement below).
    import os, requests, argparse, getpass
    parser = argparse.ArgumentParser(description='Download File from BIG-IP')
    parser.add_argument("host", help='BIG-IP IP or Hostname', )
    parser.add_argument("username", help='BIG-IP Username')
    parser.add_argument("filepath", help='Destination Filename & Path')
    args = vars(parser.parse_args())
    hostname = args['host']
    username = args['username']
    filepath = args['filepath']
    # Trailing comma keeps the getpass prompt on the same line (Python 2).
    print "%s, enter your password: " % args['username'],
    password = getpass.getpass()
    _download(hostname, (username, password), filepath)
|
[
"[email protected]"
] | |
fdfe941f2d276a821a9342bce3e3e89214a7ecfe
|
4b7e282fe480415f5d52c0fc0429f144156190fe
|
/google/ads/googleads/v7/resources/types/video.py
|
da5d5c3d0b355e480d86c2f921f4d36b37f58b30
|
[
"Apache-2.0"
] |
permissive
|
Z2Xsoft/google-ads-python
|
c4750357bb19da91bb3b6bf2fa84bef9d2df36d3
|
1779d52a0446c8afb2437b0a9e103dcb849f5590
|
refs/heads/main
| 2023-08-18T15:22:17.840364 | 2021-09-26T04:08:53 | 2021-09-26T04:08:53 | 410,444,398 | 0 | 0 |
Apache-2.0
| 2021-09-26T04:08:53 | 2021-09-26T03:55:38 | null |
UTF-8
|
Python
| false | false | 1,756 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.resources",
marshal="google.ads.googleads.v7",
manifest={"Video",},
)
class Video(proto.Message):
    r"""A video.
    Attributes:
        resource_name (str):
            Output only. The resource name of the video. Video resource
            names have the form:
            ``customers/{customer_id}/videos/{video_id}``
        id (str):
            Output only. The ID of the video.
        channel_id (str):
            Output only. The owner channel id of the
            video.
        duration_millis (int):
            Output only. The duration of the video in
            milliseconds.
        title (str):
            Output only. The title of the video.
    """
    # Field numbers below must stay in sync with the v7 Video protobuf.
    resource_name = proto.Field(proto.STRING, number=1,)
    # NOTE: ``id`` shadows the builtin by design, to mirror the API surface.
    id = proto.Field(proto.STRING, number=6, optional=True,)
    channel_id = proto.Field(proto.STRING, number=7, optional=True,)
    duration_millis = proto.Field(proto.INT64, number=8, optional=True,)
    title = proto.Field(proto.STRING, number=9, optional=True,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
[
"[email protected]"
] | |
4728768877333c9060e01d5d66cd0b2dc8cd58e2
|
74f04d78486d4986e4f0ef8c3bc480ba00caae4a
|
/articles/models.py
|
c9ee25d3e4b0b92deaea58fc10f1b49de02c4ee6
|
[] |
no_license
|
kimjy392/reboot-django
|
e24dd90182ee5d317bf13872ae169ac738a71c6c
|
4280c7bffacd759a1b785ae576e9e89a0c2269d8
|
refs/heads/master
| 2022-12-10T12:18:38.422146 | 2019-10-28T01:12:41 | 2019-10-28T01:12:41 | 207,240,898 | 0 | 10 | null | 2022-12-08T06:13:37 | 2019-09-09T06:36:38 |
Python
|
UTF-8
|
Python
| false | false | 594 |
py
|
from django.db import models
# Create your models here.
class Reporter(models.Model):
    """Article author (currently unlinked; see the commented FK on Article)."""
    name = models.CharField(max_length=20)
class Article(models.Model):
    """News article with title, body text, image and audit timestamps."""
    title = models.CharField(max_length=50)
    content = models.TextField()
    image = models.ImageField()
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save
    # reporter = models.ForeignKey(Reporter, on_delete=models.CASCADE)
class Comment(models.Model):
    """Reader comment on an Article; removed when its article is deleted."""
    content = models.CharField(max_length=50)
    article = models.ForeignKey(Article, on_delete=models.CASCADE)
|
[
"[email protected]"
] | |
2f14e4dbbb349aed3998968c565c70758358ae4e
|
23f73a7a0c0ced134f6c18bb9c200617ce31f1d5
|
/src/fauxmo/handlers/hass.py
|
6b72bc98a0323f0764aee236d59e2be04d96d90a
|
[
"MIT"
] |
permissive
|
clach04/fauxmo
|
f586d0024648f3da6d2ff38b8fe06fdb345bcfbd
|
06a0b8ff20f4811de9ac08663e0d76f8fdd83764
|
refs/heads/master
| 2021-11-27T19:13:36.583893 | 2016-07-22T13:02:39 | 2016-07-22T13:02:39 | 66,683,609 | 2 | 0 | null | 2016-08-26T22:42:52 | 2016-08-26T22:42:52 | null |
UTF-8
|
Python
| false | false | 2,671 |
py
|
# -*- coding: utf-8 -*-
import homeassistant.remote
from homeassistant.const import (SERVICE_TURN_ON, SERVICE_TURN_OFF,
SERVICE_MOVE_UP, SERVICE_MOVE_DOWN)
class HassAPIHandler:
    """Handler for Home Assistant (hass) Python API.
    Allows users to specify Home Assistant services in their config.json and
    toggle these with the Echo. While this can be done with Home Assistant's
    REST API as well (example included), I find it easier to use the Python
    API.
    """
    def __init__(self, host, password, entity, port=8123):
        """Initialize a HassAPIHandler instance
        Args:
            host (str): IP address of device running Home Assistant
            password (str): Home Assistant password
            entity (str): `entity_id` used by hass, one easy way to find is to
            curl and grep the REST API, eg:
            `curl http://IP/api/bootstrap | grep entity_id`
        Kwargs:
            port (int): Port running hass on the host computer (default 8123)
        """
        self.host = host
        self.password = password
        self.entity = entity
        self.port = port
        # Domain is the entity_id prefix, e.g. 'switch' in 'switch.lamp'.
        self.domain = self.entity.split(".")[0]
        self.api = homeassistant.remote.API(self.host, self.password,
                                            port=self.port)
        # Maps each supported domain to its on/off service constants.
        self.service_map = {
            'switch': {
                'on': SERVICE_TURN_ON,
                'off': SERVICE_TURN_OFF
            },
            'rollershutter': {
                'on': SERVICE_MOVE_UP,
                'off': SERVICE_MOVE_DOWN
            }
        }
    def send(self, signal):
        """Send a signal to the hass `call_service` function, returns True.
        The hass Python API doesn't appear to return anything with this
        function, but will raise an exception if things didn't seem to work, so
        I have it set to just return True, hoping for an exception if there was
        a problem.
        Args:
            signal (const): signal imported from homeassistant.const. I have
            imported SERVICE_TURN_ON and SERVICE_TURN_OFF, make sure you import
            any others that you need.
        """
        homeassistant.remote.call_service(self.api, self.domain, signal,
                                          {'entity_id': self.entity})
        return True
    def on(self):
        """Send the domain's 'on' service signal for this entity."""
        on_cmd = self.service_map[self.domain.lower()]['on']
        return self.send(on_cmd)
    def off(self):
        """Send the domain's 'off' service signal for this entity."""
        off_cmd = self.service_map[self.domain.lower()]['off']
        return self.send(off_cmd)
|
[
"[email protected]"
] | |
0554e077b0db3b39fc887e6b4986a336cc20fc9a
|
6a7d8b67aad59c51dafdfb8bcffd53864a3d65b0
|
/LintCode/inorderSuccessorBST.py
|
4967d1c45481e78e4f3fb69538a9e2576d98cf12
|
[] |
no_license
|
dicao425/algorithmExercise
|
8bba36c1a08a232678e5085d24bac1dbee7e5364
|
36cb33af758b1d01da35982481a8bbfbee5c2810
|
refs/heads/master
| 2021-10-07T08:56:18.030583 | 2018-12-04T05:59:17 | 2018-12-04T05:59:17 | 103,611,760 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 980 |
py
|
#!/usr/bin/python
import sys
"""
Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
"""
class Solution:
    """BST helper: locate the in-order successor of a given node."""

    def inorderSuccessor(self, root, p):
        """Return the node that follows p in an in-order walk, or None.

        The successor is either the lowest ancestor reached via a left
        turn while searching for p, or the leftmost node of p's right
        subtree when that subtree exists.
        """
        if root is None:
            return None
        candidate = None
        node = root
        # Walk down to the node matching p, remembering the last
        # ancestor whose value exceeded p's.
        while node is not None and node.val != p.val:
            if node.val > p.val:
                candidate = node
                node = node.left
            else:
                node = node.right
        if node is None:
            # p is not present in the tree.
            return None
        if node.right is None:
            return candidate
        # p has a right subtree: its leftmost node is the successor.
        node = node.right
        while node.left is not None:
            node = node.left
        return node


def main():
    """Demo stub: instantiate the solver and report success."""
    solver = Solution()
    return 0


if __name__ == "__main__":
    sys.exit(main())
|
[
"[email protected]"
] | |
2261cf66860e5e03da76218a1e66eb199a78667d
|
fc66f771e95ee36cd502d3cf7220794e6f263226
|
/src/utils/at.py
|
ca3ac83c786efd55252a4fe7853b8b4d9a002805
|
[
"MIT"
] |
permissive
|
yuanniufei/IncetOps
|
2bcb7851514f3db6bc409746d245da08032ecc06
|
e21185a4931a10996a187e63f4487b4402544c69
|
refs/heads/master
| 2020-03-25T20:50:02.339329 | 2018-08-09T07:35:02 | 2018-08-09T07:35:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,639 |
py
|
# -*- coding: utf-8 -*-
"""
IncetOps.utils.at
~~~~~~~~~~~~~~
AES加密的实现模式CBC。
CBC使用密码和salt(起扰乱作用)按固定算法(md5)产生key和iv。然后用key和iv(初始向量,加密第一块明文)加密(明文)和解密(密文)。
:copyright: (c) 2018 by staugur.
:license: MIT, see LICENSE for more details.
"""
from Crypto.Cipher import AES
from binascii import b2a_hex, a2b_hex
class KeyGenerationClass():
    """AES-CBC encrypt/decrypt helper around a fixed key.

    NOTE(review): the key is reused as the CBC IV, which weakens CBC
    security; also the '\0'-padding uses str concatenation, so this code
    targets Python 2 (under Python 3, decrypt returns bytes and
    rstrip('\0') would fail) — confirm the runtime before reuse.
    """
    def __init__(self, key):
        # The key doubles as the initialization vector in encrypt/decrypt.
        self.key = key
        self.mode = AES.MODE_CBC
    def encrypt(self, text):
        # Encrypt; if text is not a multiple of 16 (the AES block size),
        # pad it with NUL characters up to the next multiple of 16.
        cryptor = AES.new(self.key, self.mode, self.key)
        # The key length must be 16 (AES-128), 24 (AES-192) or 32
        # (AES-256) bytes; AES-128 is sufficient here.
        length = 16
        count = len(text)
        add = length - (count % length)
        text = text + ('\0' * add)
        self.ciphertext = cryptor.encrypt(text)
        # Raw AES output may not be ASCII-safe for display or storage,
        # so return the ciphertext as a hex string instead.
        return b2a_hex(self.ciphertext)
    def decrypt(self, text):
        # Decrypt a hex-encoded ciphertext and strip the trailing NUL
        # padding added by encrypt().
        cryptor = AES.new(self.key, self.mode, self.key)
        plain_text = cryptor.decrypt(a2b_hex(text))
        return plain_text.rstrip('\0')
|
[
"[email protected]"
] | |
4e107f975e9b205c04868eafff741a552f4302c0
|
d57b51ec207002e333b8655a8f5832ed143aa28c
|
/.history/gos_20200614060821.py
|
3db5babdabe6fb83a5ab594602a18e4fa77fbc59
|
[] |
no_license
|
yevheniir/python_course_2020
|
b42766c4278a08b8b79fec77e036a1b987accf51
|
a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b
|
refs/heads/master
| 2022-11-15T07:13:24.193173 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,896 |
py
|
# # Імпорт фажливих бібліотек
# from BeautifulSoup import BeautifulSoup
# import urllib2
# import re
# # Створення функції пошуку силок
# def getLinks(url):
# # отримання та присвоєння контенту сторінки в змінну
# html_page = urllib2.urlopen(url)
# # Перетворення контенту в обєкт бібліотеки BeautifulSoup
# soup = BeautifulSoup(html_page)
# # створення пустого масиву для лінків
# links = []
# # ЗА ДОПОМОГОЮ ЧИКЛУ ПРОХЛДИМСЯ ПО ВСІХ ЕЛЕМЕНТАХ ДЕ Є СИЛКА
# for link in soup.findAll('a', attrs={'href': re.compile("^http://")}):
# # Додаємо всі силки в список
# links.append(link.get('href'))
# # повертаємо список
# return links
# -----------------------------------------------------------------------------------------------------------
# # # Імпорт фажливих бібліотек
# import subprocess
# # Створення циклу та використання функції range для генерації послідовних чисел
# for ping in range(1,10):
# # генерування IP адреси базуючись на номері ітерації
# address = "127.0.0." + str(ping)
# # виклик функції call яка робить запит на IP адрес та запис відповіді в змінну
# res = subprocess.call(['ping', '-c', '3', address])
# # За допомогою умовних операторів перевіряємо відповідь та виводимо результат
# if res == 0:
# print "ping to", address, "OK"
# elif res == 2:
# print "no response from", address
# else:
# print "ping to", address, "failed!"
# -----------------------------------------------------------------------------------------------------------
# # Імпорт фажливих бібліотек
# import requests
# # Ітеруємося по масиву з адресами зображень
# for i, pic_url in enumerate(["http://x.com/nanachi.jpg", "http://x.com/nezuko.jpg"]):
# # Відкриваємо файл базуючись на номері ітерації
# with open('pic{0}.jpg'.format(i), 'wb') as handle:
# # Отримуємо картинку
# response = requests.get(pic_url, stream=True)
# # Використовуючи умовний оператор перевіряємо чи успішно виконався запит
# if not response.ok:
# print(response)
# # Ітеруємося по байтах картинки та записуємо батчаси в 1024 до файлу
# for block in response.iter_content(1024):
# # Якщо байти закінчилися, завершуємо алгоритм
# if not block:
# break
# # Записуємо байти в файл
# handle.write(block)
# -----------------------------------------------------------------------------------------------------------
# # Створюємо клас для рахунку
# class Bank_Account:
# # В конструкторі ініціалізуємо рахунок як 0
# def __init__(self):
# self.balance=0
# print("Hello!!! Welcome to the Deposit & Withdrawal Machine")
# # В методі депозит, використовуючи функцію input() просимо ввести суму поповенння та додаємо цю суму до рахунку
# def deposit(self):
# amount=float(input("Enter amount to be Deposited: "))
# self.balance += amount
# print("\n Amount Deposited:",amount)
# # В методі депозит, використовуючи функцію input() просимо ввести суму отримання та віднімаємо цю суму від рахунку
# def withdraw(self):
# amount = float(input("Enter amount to be Withdrawn: "))
# # За допомогою умовного оператора перевіряємо чи достатнього грошей на рахунку
# if self.balance>=amount:
# self.balance-=amount
# print("\n You Withdrew:", amount)
# else:
# print("\n Insufficient balance ")
# # Виводимо бааланс на екран
# def display(self):
# print("\n Net Available Balance=",self.balance)
# # Створюємо рахунок
# s = Bank_Account()
# # Проводимо операції з рахунком
# s.deposit()
# s.withdraw()
# s.display()
# -----------------------------------------------------------------------------------------------------------
# Recursively print the binary representation of a decimal number.
def decimalToBinary(n):
    """Print the bits of n, most significant first, space-separated."""
    quotient, bit = divmod(n, 2)
    if quotient:
        # Emit all higher-order bits before this one.
        decimalToBinary(quotient)
    print(bit, end=' ')
# Convert a binary-looking integer to its decimal value and print it.
def binaryToDecimal(binary):
    """Print the decimal value of *binary*, an int whose base-10 digits
    are bits (e.g. binaryToDecimal(101) prints 5). Returns None.

    Fix: the original initialized `binary1` and `n` and never used
    them; those dead locals are removed. Output is unchanged.
    """
    decimal = 0
    place = 0
    while binary != 0:
        # Peel off the lowest base-10 digit and weight it by 2**place.
        binary, bit = divmod(binary, 10)
        decimal = decimal + bit * pow(2, place)
        place += 1
    print(decimal)
|
[
"[email protected]"
] | |
225cc84d1b8df33afa6b99407f6dad6ab6b09f7f
|
1d007e58c7739f36bdb85cb9aa13b3f4584cdfb9
|
/day1/day1/urls.py
|
28f2a39c7c890b2c071f918d1dcef7609bddfad4
|
[] |
no_license
|
rahuladream/Django-Challenge
|
65410f053c06f2556628b449b817244dac32e1ac
|
0b81a6b69b743799f3d8562d6ec784950980716c
|
refs/heads/master
| 2020-06-14T14:19:54.370195 | 2019-07-03T13:02:07 | 2019-07-03T13:02:07 | 195,024,788 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 912 |
py
|
"""day1 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
# from . import admin_site
# Root URL routing table for the project.
urlpatterns = [
    # Delegate /polls/... to the polls app under the "polls" namespace.
    path('polls/', include('polls.urls', namespace="polls")),
    # path('myadmin/', include('admin_site.urls')), # grappelli URLS
    # Built-in Django admin site.
    path('admin/', admin.site.urls),
]
|
[
"[email protected]"
] | |
1f638f6038f33df2aa4f2e79d8b32c4280c955fd
|
3db8bc4c7297895c687be374a206b63d5d329e5e
|
/Python3/019_Remove_Nth_Node_From_End_of_List.py
|
0d1d00b05fb6965d0c5f5762555d56236207eb67
|
[
"MIT"
] |
permissive
|
Jian-jobs/Leetcode-Python3
|
dd06d3238b69ae1419754810dec68705d3344a41
|
f2d3bb6ecb7d5d0bca4deaed26162fbe0813a73e
|
refs/heads/master
| 2020-05-15T00:59:28.160898 | 2018-11-16T04:44:32 | 2018-11-16T04:44:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,425 |
py
|
#!usr/bin/env python3
# -*- coding:utf-8 -*-
'''
Given a linked list, remove the nth node from the end of list and return its head.
For example,
Given linked list: 1->2->3->4->5, and n = 2.
After removing the second node from the end, the linked list becomes 1->2->3->5.
Note:
Given n will always be valid.
Try to do this in one pass.
'''
# Definition for singly-linked list.
class ListNode(object):
    """A singly-linked list node holding one value."""
    def __init__(self, x):
        self.val = x
        self.next = None

    # Debug helper: print this node's value, then the rest of the list.
    def myPrint(self):
        print(self.val)
        if self.next:
            self.next.myPrint()


class Solution(object):
    def removeNthFromEnd(self, head, n):
        """
        Remove the n-th node from the end of the list in one pass.

        :type head: ListNode
        :type n: int
        :rtype: ListNode
        """
        if not head:
            return head
        # A dummy node in front of the head makes removing the head
        # itself a non-special case.
        dummy = ListNode(-1)
        dummy.next = head
        ahead = dummy
        behind = dummy
        # Advance `ahead` n+1 links so the two pointers bracket the
        # target node.
        hops = n
        while ahead and hops >= 0:
            ahead = ahead.next
            hops -= 1
        # March both forward until `ahead` runs off the end; `behind`
        # now points at the node just before the one to delete.
        while ahead:
            ahead = ahead.next
            behind = behind.next
        behind.next = behind.next.next
        return dummy.next
if __name__ == "__main__":
    # Smoke test: build 1->2->3->4->5, remove the 5th node from the end
    # (the head), and print the survivors (2 3 4 5), one per line.
    n5 = ListNode(5)
    n4 = ListNode(4)
    n3 = ListNode(3)
    n2 = ListNode(2)
    n1 = ListNode(1)
    n1.next = n2
    n2.next = n3
    n3.next = n4
    n4.next = n5
    result = Solution().removeNthFromEnd(n1, 5)
    result.myPrint()
|
[
"[email protected]"
] | |
eed66ce765aa9eae0228a51ffc68c16ad9405ae4
|
1816378da612c7db376934b033e4fd64951338b6
|
/gui/jails/migrations/0007_add_model_JailTemplate.py
|
dc21d06fbe871543b3648239738a169c72011b35
|
[] |
no_license
|
quater/freenas-9.2-xen
|
46517a7a23546764347d3c91108c70a8bd648ec6
|
96e580055fa97575f0a0cb23a72495860467bcfb
|
refs/heads/master
| 2021-01-16T22:21:38.781962 | 2014-02-07T05:59:13 | 2014-02-07T05:59:13 | 16,609,785 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,422 |
py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from freenasUI.jails.utils import get_jails_index
class Migration(SchemaMigration):
    """South schema migration: adds the JailTemplate model and seeds the
    built-in FreeBSD and Linux jail templates, with download URLs rooted
    at the configured jails index."""

    def forwards(self, orm):
        """Create the jails_jailtemplate table and insert the default
        template rows."""
        # Adding model 'JailTemplate'
        db.create_table(u'jails_jailtemplate', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('jt_name', self.gf('django.db.models.fields.CharField')(max_length=120)),
            ('jt_url', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal(u'jails', ['JailTemplate'])
        #
        # The standard jail types
        #
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('pluginjail', '%s/freenas-pluginjail.tgz')" % get_jails_index())
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('portjail', '%s/freenas-portjail.tgz')" % get_jails_index())
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('standard', '%s/freenas-standard.tgz')" % get_jails_index())
        #
        # And... some Linux jail templates
        #
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('debian-7.1.0', '%s/linux-debian-7.1.0.tgz')" % get_jails_index())
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('gentoo-20130820', '%s/linux-gentoo-20130820.tgz')" % get_jails_index())
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('ubuntu-13.04', '%s/linux-ubuntu-13.04.tgz')" % get_jails_index())
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('centos-6.4', '%s/linux-centos-6.4.tgz')" % get_jails_index())
        db.execute("insert into jails_jailtemplate (jt_name, jt_url) "
            "values ('suse-12.3', '%s/linux-suse-12.3.tgz')" % get_jails_index())

    def backwards(self, orm):
        """Reverse of forwards(): drop the jails_jailtemplate table
        (the seeded rows disappear with it)."""
        # Deleting model 'JailTemplate'
        db.delete_table(u'jails_jailtemplate')

    # Frozen ORM state South uses to construct the `orm` object passed
    # to forwards()/backwards() above.
    models = {
        u'jails.jails': {
            'Meta': {'object_name': 'Jails'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'jail_alias_bridge_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_alias_bridge_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_alias_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_alias_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_autostart': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_bridge_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_bridge_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_defaultrouter_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_defaultrouter_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_host': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
            'jail_ipv4': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_ipv6': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_mac': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_nat': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'}),
            'jail_status': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
            'jail_type': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
            'jail_vnet': ('django.db.models.fields.CharField', [], {'max_length': '120', 'null': 'True', 'blank': 'True'})
        },
        u'jails.jailsconfiguration': {
            'Meta': {'object_name': 'JailsConfiguration'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'jc_collectionurl': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'jc_ipv4_network': ('freenasUI.freeadmin.models.Network4Field', [], {'max_length': '18', 'blank': 'True'}),
            'jc_ipv4_network_end': ('freenasUI.freeadmin.models.Network4Field', [], {'max_length': '18', 'blank': 'True'}),
            'jc_ipv4_network_start': ('freenasUI.freeadmin.models.Network4Field', [], {'max_length': '18', 'blank': 'True'}),
            'jc_ipv6_network': ('freenasUI.freeadmin.models.Network6Field', [], {'max_length': '43', 'blank': 'True'}),
            'jc_ipv6_network_end': ('freenasUI.freeadmin.models.Network6Field', [], {'max_length': '43', 'blank': 'True'}),
            'jc_ipv6_network_start': ('freenasUI.freeadmin.models.Network6Field', [], {'max_length': '43', 'blank': 'True'}),
            'jc_path': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
        },
        u'jails.jailtemplate': {
            'Meta': {'object_name': 'JailTemplate'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'jt_name': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
            'jt_url': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'jails.nullmountpoint': {
            'Meta': {'object_name': 'NullMountPoint'},
            'destination': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'jail': ('django.db.models.fields.CharField', [], {'max_length': '120'}),
            'source': ('django.db.models.fields.CharField', [], {'max_length': '300'})
        }
    }

    complete_apps = ['jails']
|
[
"[email protected]"
] | |
e2cb2c8e89a8b49e48345e5c5ac0b7f4d4038e0c
|
d913bac9fa42473aa8cee68c8ad8b4eba5484b89
|
/Scripts/features/VoidTender_POS.py
|
6f88141ad64358009955cec6efcfc5ed742ca805
|
[] |
no_license
|
priyatam0509/Automation-Testing
|
07e7c18b4522976f0ade2b72bd46cffd55c5634e
|
d24805456e5a0126c036c1688a5d112bdcf4467a
|
refs/heads/main
| 2023-02-26T19:07:41.761905 | 2021-01-30T10:13:34 | 2021-01-30T10:13:34 | 334,376,899 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,424 |
py
|
"""
File name: VoidTender_POS.py
Tags:
Description:
Author: Gene Todd
Date created: 2020-04-16 09:40:28
Date last modified:
Python Version: 3.7
"""
import logging, time
from app import Navi, mws, pos, system
from app.framework.tc_helpers import setup, test, teardown, tc_fail
class VoidTender_POS():
    """
    Description: Test class that provides an interface for testing.
    Exercises voiding a tender on the POS both with and without the
    "Void Tender" reason-code requirement enabled.
    """

    def __init__(self):
        self.log = logging.getLogger()

    @setup
    def setup(self):
        """
        Performs any initialization that is not default.
        """
        #if not system.restore_snapshot():
        #    self.log.debug("No snapshot to restore, if this is not expected please contact automation team")
        pos.connect()
        pos.sign_on()

    @test
    def test_voidCash(self):
        """
        Basic void tender case using cash. Reason codes enabled.
        """
        self.prep_trans()
        self.log.info("Adding tender")
        pos.enter_keypad(100, after="Enter")
        # Assume the tender has already been selected when it was added
        self.log.info("Voiding cash tender")
        pos.click_tender_key("Void")
        # Confirms the reason codes appeared
        pos.select_list_item("Cashier Error")
        pos.click("Enter")
        # Confirm the tender is gone
        jrnl = pos.read_transaction_journal()
        for line in jrnl:
            if "Cash" in line:
                tc_fail("Cash tender found in transaction after being voided")
        self.log.info("Cash confirmed no longer in transaction journal")
        # Pay out the transaction for the next test
        self.log.info("Paying out transaction")
        pos.click_tender_key("Exact Change")
        # Wait for the main screen (Tools key) so the next test starts clean.
        pos.is_element_present(pos.controls['function keys']['tools'], timeout=5)

    @test
    def test_noReasonCodes(self):
        """
        Tests our ability to void tenders without reason codes enabled
        """
        # Disable reason codes
        pos.close()
        self.log.info("Removing void tender reason code")
        Navi.navigate_to('Register Group Maintenance')
        mws.click_toolbar('Change')
        mws.select_tab('Reason Codes')
        mws.set_value('Available Functions', 'Void Tender')
        mws.set_value('Require Reason Code', False)
        mws.click_toolbar('Save')
        pos.connect()
        # Poll until the MWS top bar clears, i.e. the register-group
        # options have finished reloading (at most ~5 seconds).
        tries = 0
        while mws.get_top_bar_text() and tries < 10:
            self.log.info("Waiting for reload options...")
            tries = tries + 1
            time.sleep(.5)
        self.prep_trans()
        self.log.info("Adding tender")
        pos.enter_keypad(100, after="Enter")
        # Assume the tender has already been selected when it was added
        self.log.info("Voiding cash tender")
        pos.click_tender_key("Void")
        # Wait for void to process
        pos.is_element_present(pos.controls['pay']['exact_amount'], timeout=5)
        # Confirm the tender is gone
        jrnl = pos.read_transaction_journal()
        for line in jrnl:
            if "Cash" in line:
                tc_fail("Cash tender found in transaction after being voided")
        self.log.info("Cash confirmed no longer in transaction journal")
        # Pay out the transaction for the next test
        self.log.info("Paying out transaction")
        pos.click_tender_key("Exact Change")
        pos.is_element_present(pos.controls['function keys']['tools'], timeout=5)

    @teardown
    def teardown(self):
        """
        Performs cleanup after this script ends.
        """
        pos.close()
        # Re-enable reason codes so later suites see the default config.
        self.log.info("Removing void tender reason code")
        Navi.navigate_to('Register Group Maintenance')
        mws.click_toolbar('Change')
        mws.select_tab('Reason Codes')
        mws.set_value('Available Functions', 'Void Tender')
        mws.set_value('Require Reason Code', True)
        mws.click_toolbar('Save')

    def prep_trans(self):
        """
        Helper function for adding an item and getting to the pay screen for tests
        """
        self.log.info("Setting up transaction for VoidTender test...")
        pos.click("Item 1")
        pos.enter_keypad(1000, after="Enter")
        pos.click("Pay")
        self.log.info("... Setup complete")
|
[
"[email protected]"
] | |
de3a0c28cc1023aa05a34f5fd437c0431ba35fee
|
781e2692049e87a4256320c76e82a19be257a05d
|
/all_data/exercism_data/python/leap/42142e465a234cfaa158392bdda680b9.py
|
2e0e27725c2144af3babc0a50be01d1f5932c483
|
[] |
no_license
|
itsolutionscorp/AutoStyle-Clustering
|
54bde86fe6dbad35b568b38cfcb14c5ffaab51b0
|
be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
|
refs/heads/master
| 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null |
UTF-8
|
Python
| false | false | 353 |
py
|
def is_leap_year(year):
    """Return True if `year` is a leap year.

    Leap on every year divisible by 4, except centuries (divisible by
    100) that are not also divisible by 400.
    """
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
"""
on every year that is evenly divisible by 4
except every year that is evenly divisible by 100
unless the year is also evenly divisible by 400
"""
|
[
"[email protected]"
] | |
b5e435d58d0527b0a10b2c3d2ddb08609b44daa9
|
da9c4a9a92d49d2fb2983a54e0f64c2a1ce8aa19
|
/symphony/cli/pysymphony/graphql/input/add_image.py
|
8a71be0cda0a9d98d166adf96a4a1fc7a8c266e2
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
rohan-prasad/magma
|
347c370347724488215a0783504788eac41d8ec7
|
2c1f36d2fd04eae90366cc8b314eaab656d7f8ad
|
refs/heads/master
| 2022-10-14T14:08:14.067593 | 2020-06-11T23:52:03 | 2020-06-11T23:54:27 | 271,671,835 | 0 | 0 |
NOASSERTION
| 2020-06-12T00:20:23 | 2020-06-12T00:17:39 | null |
UTF-8
|
Python
| false | false | 748 |
py
|
#!/usr/bin/env python3
# @generated AUTOGENERATED file. Do not Change!
from dataclasses import dataclass
from datetime import datetime
from functools import partial
from gql.gql.datetime_utils import DATETIME_FIELD
from numbers import Number
from typing import Any, Callable, List, Mapping, Optional
from dataclasses_json import DataClassJsonMixin
from gql.gql.enum_utils import enum_field
from ..enum.image_entity import ImageEntity
@dataclass
class AddImageInput(DataClassJsonMixin):
    # GraphQL input payload for the addImage mutation (file is
    # autogenerated — regenerate rather than hand-editing).
    # NOTE(review): fields without "=" appearing after enum_field /
    # DATETIME_FIELD entries are legal only if those helpers return
    # dataclasses.field(...) without a default — presumably they do;
    # confirm against the gql.gql helpers.
    entityType: ImageEntity = enum_field(ImageEntity)
    entityId: str
    imgKey: str
    fileName: str
    fileSize: int
    modified: datetime = DATETIME_FIELD
    contentType: str
    category: Optional[str] = None
    annotation: Optional[str] = None
|
[
"[email protected]"
] | |
7c68e9555011e76ecb807ab9b5340bbc994a8aca
|
64bf39b96a014b5d3f69b3311430185c64a7ff0e
|
/intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_pkg_firewall_policy_vpndstnode.py
|
cc4c839796dde50386c6787ae3951a868ea8cab2
|
[
"MIT"
] |
permissive
|
SimonFangCisco/dne-dna-code
|
7072eba7da0389e37507b7a2aa5f7d0c0735a220
|
2ea7d4f00212f502bc684ac257371ada73da1ca9
|
refs/heads/master
| 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 |
MIT
| 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null |
UTF-8
|
Python
| false | false | 7,919 |
py
|
#!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_pkg_firewall_policy_vpndstnode
short_description: no description
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
- Running in workspace locking mode is supported in this FortiManager module, the top
level parameters workspace_locking_adom and workspace_locking_timeout help do the work.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
- Normally, running one module can fail when a non-zero rc is returned. you can also override
the conditions to fail or succeed with parameters rc_failed and rc_succeeded
options:
bypass_validation:
description: only set to True when module schema diffs with FortiManager API structure, module continues to execute without validating parameters
required: false
type: bool
default: false
workspace_locking_adom:
description: the adom to lock for FortiManager running in workspace mode, the value can be global and others including root
required: false
type: str
workspace_locking_timeout:
description: the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
state:
description: the directive to create, update or delete an object
type: str
required: true
choices:
- present
- absent
rc_succeeded:
description: the rc codes list with which the conditions to succeed will be overriden
type: list
required: false
rc_failed:
description: the rc codes list with which the conditions to fail will be overriden
type: list
required: false
adom:
description: the parameter (adom) in requested url
type: str
required: true
pkg:
description: the parameter (pkg) in requested url
type: str
required: true
policy:
description: the parameter (policy) in requested url
type: str
required: true
pkg_firewall_policy_vpndstnode:
description: the top level parameters set
required: false
type: dict
suboptions:
host:
type: str
description: no description
seq:
type: int
description: no description
subnet:
type: str
description: no description
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: no description
fmgr_pkg_firewall_policy_vpndstnode:
bypass_validation: False
workspace_locking_adom: <value in [global, custom adom including root]>
workspace_locking_timeout: 300
rc_succeeded: [0, -2, -3, ...]
rc_failed: [-2, -3, ...]
adom: <your own value>
pkg: <your own value>
policy: <your own value>
state: <value in [present, absent]>
pkg_firewall_policy_vpndstnode:
host: <value of string>
seq: <value of integer>
subnet: <value of string>
'''
RETURN = '''
request_url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request
returned: always
type: int
sample: 0
response_message:
description: The descriptive message of the api response
type: str
returned: always
sample: OK.
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
    """Ansible entry point: declare the request schema for the
    vpn_dst_node endpoint and hand CRUD processing to NAPIManager."""
    # Collection-level URL used for add/list requests.
    jrpc_urls = [
        '/pm/config/adom/{adom}/pkg/{pkg}/firewall/policy/{policy}/vpn_dst_node'
    ]
    # Per-object URL; the object is addressed by the primary key 'seq'.
    perobject_jrpc_urls = [
        '/pm/config/adom/{adom}/pkg/{pkg}/firewall/policy/{policy}/vpn_dst_node/{vpn_dst_node}'
    ]
    # Module parameters substituted into the URL templates above.
    url_params = ['adom', 'pkg', 'policy']
    module_primary_key = 'seq'
    module_arg_spec = {
        'bypass_validation': {
            'type': 'bool',
            'required': False,
            'default': False
        },
        'workspace_locking_adom': {
            'type': 'str',
            'required': False
        },
        'workspace_locking_timeout': {
            'type': 'int',
            'required': False,
            'default': 300
        },
        'rc_succeeded': {
            'required': False,
            'type': 'list'
        },
        'rc_failed': {
            'required': False,
            'type': 'list'
        },
        'state': {
            'type': 'str',
            'required': True,
            'choices': [
                'present',
                'absent'
            ]
        },
        'adom': {
            'required': True,
            'type': 'str'
        },
        'pkg': {
            'required': True,
            'type': 'str'
        },
        'policy': {
            'required': True,
            'type': 'str'
        },
        'pkg_firewall_policy_vpndstnode': {
            'required': False,
            'type': 'dict',
            'options': {
                'host': {
                    'required': False,
                    'type': 'str'
                },
                'seq': {
                    'required': True,
                    'type': 'int'
                },
                'subnet': {
                    'required': False,
                    'type': 'str'
                }
            }
        }
    }

    # No extra cross-parameter validation rules for this endpoint.
    params_validation_blob = []
    check_galaxy_version(module_arg_spec)
    module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'pkg_firewall_policy_vpndstnode'),
                           supports_check_mode=False)

    # A socket path is only present when run under the httpapi connection
    # plugin, which this module requires.
    fmgr = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, module_primary_key, url_params, module, connection, top_level_schema_name='data')
        fmgr.validate_parameters(params_validation_blob)
        fmgr.process_curd()
    else:
        module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
    module.exit_json(meta=module.params)


if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
02ab6ce4b0a5e3cc8f4857f83855687843f7324c
|
29f65ef4059ba04c20558f3be36c06fe3879a8e6
|
/c1/func.py
|
a173c0eafdf8e495d94cfb2dc8c14bfc80c1e2be
|
[] |
no_license
|
kobe24shou/pythonwebdev
|
d9c912bd9304802069bc41345b054b065a173272
|
c7c6c5af69e7d8783e5c8b15f75e9ca61ed6a03f
|
refs/heads/master
| 2020-03-17T23:01:41.787573 | 2018-06-06T14:11:21 | 2018-06-06T14:11:21 | 134,028,551 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 417 |
py
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
def sum(x, y):
    """Return the sum of the two arguments.

    NOTE: intentionally shadows the builtin ``sum`` to keep this
    module's public interface unchanged.
    """
    return x + y


def total(x, y, z):
    """Return the tuple (x + y, x + y + z)."""
    pair_total = sum(x, y)
    grand_total = sum(pair_total, z)
    return pair_total, grand_total


def main():
    """Demonstration entry point: takes no arguments, returns nothing."""
    print(f"return of sum: {sum(4, 6)}")
    two, three = total(1, 7, 10)
    print(f"return of total: {two} , {three}")


if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
cc2f066e03ede1f54ac46b07dad2bb6621a03d10
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_287/ch149_2020_04_13_19_29_39_088791.py
|
5364f78261dc18794532675b8b2199879ae98f9f
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 950 |
py
|
# Read gross salary (R$) and number of dependents from stdin (pt-BR prompts).
salario_bruto = int(input('Seu salário bruto: '))
n_dependentes=int(input('Quantos dependentes: ' ))
def faixa(sal):
    """Return the INSS contribution rate for gross salary *sal* (2020 brackets)."""
    if sal > 3134.4:
        return 0.14
    if sal > 2089.6:
        return 0.12
    if sal > 1045:
        return 0.09
    return 0.075
# Calculation base after INSS: below the ceiling the bracket rate applies
# to the gross salary; above it a fixed contribution (R$ 671.12) is taken.
# Each dependent deducts a further R$ 189.59.
# BUG FIX: the original multiplied by an undefined name ``sal`` (NameError);
# the bracket rate must be applied to ``salario_bruto`` itself.
if salario_bruto <= 6101.06:
    b = salario_bruto - (faixa(salario_bruto) * salario_bruto) - (n_dependentes * 189.59)
else:
    b = salario_bruto - 671.12 - (n_dependentes * 189.59)
def deducao(c):
    """Return the IRRF fixed deduction (R$) for calculation base *c*."""
    brackets = (
        (1903.98, 0),
        (2826.65, 142.8),
        (3751.05, 354.8),
        (4664.68, 636.13),
    )
    for limit, amount in brackets:
        if c <= limit:
            return amount
    return 869.36
def aliquota(d):
    """Return the IRRF tax rate for calculation base *d*."""
    for limit, rate in ((1903.98, 0), (2826.65, 0.075),
                        (3751.05, 0.15), (4664.68, 0.225)):
        if d <= limit:
            return rate
    return 0.275
# Tax due: rate times base, minus the bracket's fixed deduction.
# BUG FIX: the original called an undefined name ``ded`` (NameError);
# the helper defined above is ``deducao``.
IRRF = (b * aliquota(b)) - deducao(b)
# NOTE(review): the label says INSS but the value printed is the IRRF —
# confirm the intended wording with the exercise statement.
print("Sua contribuição para o INSS é de: ", IRRF)
|
[
"[email protected]"
] | |
b5be03bae05e2c31bc7c6b3158b111ca8d5fc886
|
791ce6452fb555f953ed3adb1966b59abc7c2dbb
|
/arxiv_net/dashboard/assets/style.py
|
c359a7598a7dde951f38e4ceb3c9d495568f0370
|
[] |
no_license
|
mirandrom/arxiv-net
|
d63b76006d7cde62a4ba4e623ffa3971436455f5
|
86bdc7a878c8d1d4a0135ddd2785cb59ca638937
|
refs/heads/master
| 2023-03-21T13:37:30.567726 | 2019-12-05T23:25:24 | 2019-12-05T23:25:24 | 222,019,331 | 2 | 0 | null | 2021-03-20T02:10:29 | 2019-11-15T23:26:34 |
Python
|
UTF-8
|
Python
| false | false | 1,468 |
py
|
# Shared inline styles (CSS-in-dict) for the dashboard's Dash components.

# Material-style drop shadow for card containers.
card_style = {
    "box-shadow": "0 4px 5px 0 rgba(0,0,0,0.14), 0 1px 10px 0 rgba(0,0,0,0.12), 0 2px 4px -1px rgba(0,0,0,0.3)"
}

# Blue palette (light to dark) followed by a few greys.
BLUES = ["rgb(210, 218, 255)", "rgb(86, 117, 255)", "rgb(8, 31, 139)",
         "rgb(105, 125, 215)", "rgb(84, 107, 208)",
         "rgb(210, 210, 210)", "rgb(102, 103, 107)", "rgb(19, 23, 37)", ]

# Gradient stops, blue -> purple -> pink -> warm tones; the first four are
# used below for the selected-tab background.
gradients = ['rgb(115, 132, 212)', 'rgb(169, 120, 219)', 'rgb(211, 107, 218)',
             'rgb(237, 84, 199)',
             'rgb(244, 70, 157)', 'rgb(240, 90, 127)', 'rgb(238, 117, 124)',
             'rgb(230, 193, 119)']

# Default (unselected) tab appearance.
tab_style = {
    'borderLeft' : 'thin lightgrey solid',
    'borderRight': 'thin lightgrey solid',
    'borderTop'  : '2px white solid',
    'boxShadow'  : 'inset 0px -1px 0px 0px lightgrey',
    'fontSize'   : '0.7vw',
    'color'      : 'black',
}

# Selected tab: white text over a gradient of the first four stops.
selected_style = {
    'borderLeft'      : 'thin lightgrey solid',
    'borderRight'     : 'thin lightgrey solid',
    'background-image': f"linear-gradient(to top left, {','.join(gradients[:4])})",
    'color'           : 'white',
    'fontSize'        : '0.7vw',
}

# Wrapper around the tab bar; disabled experiments kept for reference.
container_style = {
    # 'width' : '100%',
    'verticalAlign': 'middle',
    # 'display' : 'inlineBlock',
    # 'boxShadow': 'inset 0px -1px 0px 0px lightgrey',
    'alignItems'   : 'center',
    'padding'      : '20px ',
}

# EXTERNAL CSS / JS
# app.css.config.serve_locally = True
# app.scripts.config.serve_locally = True
# app.config['suppress_callback_exceptions'] = True
|
[
"[email protected]"
] | |
2aa7d7541d47bf6cbc5349b3cb975f5eb6b55412
|
29145db13229d311269f317bf2819af6cba7d356
|
/may easy/maxVal.py
|
91313d4b8983c93bfc3cfa232fbdb5c36ee8edff
|
[] |
no_license
|
rocket3989/hackerEarth2019
|
802d1ca6fd03e80657cbe07a3f123e087679af4d
|
42c0a7005e52c3762496220136cc5c1ee93571bb
|
refs/heads/master
| 2021-07-05T01:32:42.203964 | 2020-12-22T03:40:20 | 2020-12-22T03:40:20 | 211,607,143 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 209 |
py
|
# Precompute Fibonacci numbers; the list ends with the first value
# that exceeds 10**18 (inclusive, matching the original behaviour).
fib = [1, 1]
while fib[-1] <= 10 ** 18:
    fib.append(fib[-1] + fib[-2])

N = int(input())

# Print the first Fibonacci number strictly greater than N.
for value in fib:
    if value > N:
        print(value)
        break
|
[
"[email protected]"
] | |
969d2be266219f2b062ad7111a43f44275354f4d
|
13b2f7ca4bbad32b0ce7d547399e6097580ae097
|
/bfs+dfs/1260_DFS와 BFS.py
|
f69616d0dba433892b0d30f2d1628280ae3b9b5c
|
[] |
no_license
|
hksoftcorn/review
|
dadbd3a4ee7961282bfefd697a97f6ccf78dbe83
|
474aef3747c135c54322ff28261d2a6812a3d9a0
|
refs/heads/master
| 2023-06-17T05:41:50.178831 | 2021-07-11T23:30:27 | 2021-07-11T23:30:27 | 385,072,542 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 708 |
py
|
def dfs(v):
    """Recursive depth-first search from *v*.

    Marks the module-level ``visited`` array and appends discovery order
    to ``dfs_path``; neighbours are explored in ascending order.
    """
    visited[v] = 1
    for neighbour in sorted(G[v]):
        if visited[neighbour]:
            continue
        dfs_path.append(neighbour)
        dfs(neighbour)
def bfs(v):
    """Breadth-first search from *v*, appending visit order to ``bfs_path``.

    Uses its own local visit marks (independent of the DFS ``visited``).
    """
    seen = [0] * (N + 1)
    seen[v] = 1
    queue = [v]
    while queue:
        node = queue.pop(0)  # O(n) pop — acceptable for contest input sizes
        for neighbour in sorted(G[node]):
            if seen[neighbour]:
                continue
            seen[neighbour] = 1
            bfs_path.append(neighbour)
            queue.append(neighbour)
# Input: N vertices, E edges, start vertex V, then E undirected edges.
N, E, V = map(int, input().split())
G = [[] for _ in range(N + 1)]  # adjacency lists, 1-indexed
visited = [0] * (N + 1)  # DFS visit marks shared with dfs()
for _ in range(E):
    u, v = map(int, input().split())
    G[u].append(v)
    G[v].append(u)
# Print DFS order, then BFS order, both starting at V (V itself seeded).
dfs_path = [V]
dfs(V)
print(' '.join(map(str, dfs_path)))
bfs_path = [V]
bfs(V)
print(' '.join(map(str, bfs_path)))
|
[
"[email protected]"
] | |
8e1117685899d2bf068c219a6f66312448e008ff
|
9131dd03ff2880fca2a5883572784f8e51046e41
|
/env/lib/python3.6/site-packages/clicksend_client/models/delivery_issue.py
|
84f1503041f46cfe49989d1ade2142787157ff54
|
[] |
no_license
|
aviadm24/coronaap
|
fe10619ae42a8c839cd0a2c2c522187c5f21fbc7
|
5608c2d77cb3441b48ba51da04c06a187fb09488
|
refs/heads/master
| 2022-12-09T21:35:17.179422 | 2021-01-28T08:21:49 | 2021-01-28T08:21:49 | 249,938,200 | 0 | 0 | null | 2021-09-22T18:47:51 | 2020-03-25T09:36:10 |
JavaScript
|
UTF-8
|
Python
| false | false | 7,502 |
py
|
# coding: utf-8
"""
ClickSend v3 API
This is an official SDK for [ClickSend](https://clicksend.com) Below you will find a current list of the available methods for clicksend. *NOTE: You will need to create a free account to use the API. You can register [here](https://dashboard.clicksend.com/#/signup/step1/)..* # noqa: E501
OpenAPI spec version: 3.1
Contact: [email protected]
Generated by: https://github.com/clicksend-api/clicksend-codegen.git
"""
import pprint
import re # noqa: F401
import six
class DeliveryIssue(object):
    """NOTE: This class is auto generated by the clicksend code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      clicksend_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> declared type (consumed by to_dict()).
    clicksend_types = {
        'message_id': 'str',
        'type': 'str',
        'description': 'str',
        'client_comments': 'str',
        'email_address': 'str'
    }

    # Attribute name -> JSON key in the API payload.
    attribute_map = {
        'message_id': 'message_id',
        'type': 'type',
        'description': 'description',
        'client_comments': 'client_comments',
        'email_address': 'email_address'
    }

    # Discriminator value -> subclass; empty for this (non-polymorphic) model.
    discriminator_value_class_map = {
    }

    def __init__(self, message_id=None, type=None, description=None, client_comments=None, email_address=None):  # noqa: E501
        """DeliveryIssue - a model defined in Swagger"""  # noqa: E501

        self._message_id = None
        self._type = None
        self._description = None
        self._client_comments = None
        self._email_address = None
        # JSON field consulted by get_real_child_model() for polymorphism.
        self.discriminator = 'classType'

        if message_id is not None:
            self.message_id = message_id
        # type/description/email_address are required: their setters raise
        # ValueError when passed None.
        self.type = type
        self.description = description
        if client_comments is not None:
            self.client_comments = client_comments
        self.email_address = email_address

    @property
    def message_id(self):
        """Gets the message_id of this DeliveryIssue.  # noqa: E501

        The message id of the message.  # noqa: E501

        :return: The message_id of this DeliveryIssue.  # noqa: E501
        :rtype: str
        """
        return self._message_id

    @message_id.setter
    def message_id(self, message_id):
        """Sets the message_id of this DeliveryIssue.

        The message id of the message.  # noqa: E501

        :param message_id: The message_id of this DeliveryIssue.  # noqa: E501
        :type: str
        """
        self._message_id = message_id

    @property
    def type(self):
        """Gets the type of this DeliveryIssue.  # noqa: E501

        The type of message, must be one of the following values SMS, MMS, VOICE, EMAIL_MARKETING, EMAIL_TRANSACTIONAL, FAX, POST.  # noqa: E501

        :return: The type of this DeliveryIssue.  # noqa: E501
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this DeliveryIssue.

        The type of message, must be one of the following values SMS, MMS, VOICE, EMAIL_MARKETING, EMAIL_TRANSACTIONAL, FAX, POST.  # noqa: E501

        :param type: The type of this DeliveryIssue.  # noqa: E501
        :type: str
        """
        # Required field: reject None (note: value set is not validated here).
        if type is None:
            raise ValueError("Invalid value for `type`, must not be `None`")  # noqa: E501

        self._type = type

    @property
    def description(self):
        """Gets the description of this DeliveryIssue.  # noqa: E501

        The description of the message.  # noqa: E501

        :return: The description of this DeliveryIssue.  # noqa: E501
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this DeliveryIssue.

        The description of the message.  # noqa: E501

        :param description: The description of this DeliveryIssue.  # noqa: E501
        :type: str
        """
        # Required field: reject None.
        if description is None:
            raise ValueError("Invalid value for `description`, must not be `None`")  # noqa: E501

        self._description = description

    @property
    def client_comments(self):
        """Gets the client_comments of this DeliveryIssue.  # noqa: E501

        The user's comments.  # noqa: E501

        :return: The client_comments of this DeliveryIssue.  # noqa: E501
        :rtype: str
        """
        return self._client_comments

    @client_comments.setter
    def client_comments(self, client_comments):
        """Sets the client_comments of this DeliveryIssue.

        The user's comments.  # noqa: E501

        :param client_comments: The client_comments of this DeliveryIssue.  # noqa: E501
        :type: str
        """
        self._client_comments = client_comments

    @property
    def email_address(self):
        """Gets the email_address of this DeliveryIssue.  # noqa: E501

        The user's email address.  # noqa: E501

        :return: The email_address of this DeliveryIssue.  # noqa: E501
        :rtype: str
        """
        return self._email_address

    @email_address.setter
    def email_address(self, email_address):
        """Sets the email_address of this DeliveryIssue.

        The user's email address.  # noqa: E501

        :param email_address: The email_address of this DeliveryIssue.  # noqa: E501
        :type: str
        """
        # Required field: reject None.
        if email_address is None:
            raise ValueError("Invalid value for `email_address`, must not be `None`")  # noqa: E501

        self._email_address = email_address

    def get_real_child_model(self, data):
        """Returns the real base class specified by the discriminator"""
        discriminator_value = data[self.discriminator].lower()
        return self.discriminator_value_class_map.get(discriminator_value)

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.clicksend_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(DeliveryIssue, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, DeliveryIssue):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"[email protected]"
] | |
712ebb3e8e9c6daab9c2cd3b469cecab96797c6e
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_godfather.py
|
813d71c96e7c700883fb63b1932814bc31f99141
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 364 |
py
|
# class header
class _GODFATHER():
    """Lexicon entry for the noun 'GODFATHER'."""

    def __init__(self,):
        self.name = "GODFATHER"
        self.definitions = [u'a male godparent', u'the leader of a criminal group, especially a mafia family']
        self.parents = []
        self.childen = []  # (sic) original attribute name kept for compatibility
        self.properties = []
        self.jsondata = {}
        self.specie = 'nouns'

    def run(self, obj1=None, obj2=None):
        """Return the entry's JSON payload (currently ignores its arguments).

        BUG FIX: the original used mutable default arguments (``obj1=[]``,
        ``obj2=[]``), which are shared across calls; ``None`` sentinels are
        backward-compatible and avoid that pitfall.
        """
        if obj1 is None:
            obj1 = []
        if obj2 is None:
            obj2 = []
        return self.jsondata
|
[
"[email protected]"
] | |
c064647cd1304d7aff89c6683cd29e2b315cfa1e
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2863/60673/273869.py
|
625083ae06703fe4379f18234384daf60c110ffb
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 318 |
py
|
# Wall of height h; person i has height a[i]; an upright person needs
# width 1, a bent-over person needs width 2.  Print the total width.
n, h = input().split(" ")
a = input().split(" ")
n = int(n)
# BUG FIX: the original read ``h = int(n)``, overwriting the wall height
# with the person count, so the comparison below used the wrong limit.
h = int(h)
for i in range(n):
    a[i] = int(a[i])
walk_num = 0  # people who fit upright (a[i] <= h)
bend_num = 0  # people who must bend (a[i] > h)
for i in range(n):
    if a[i] <= h:
        walk_num += 1
    else:
        bend_num += 1
print(walk_num + bend_num * 2)
|
[
"[email protected]"
] | |
00256e1c2a75d6e2643d1a889bf9b296376e09eb
|
6be845bf70a8efaf390da28c811c52b35bf9e475
|
/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/file/cmd/put/type_Params.py
|
7be73ef22b4325cbd9bcac9c3611c066cc82f983
|
[] |
no_license
|
kyeremalprime/ms
|
228194910bf2ed314d0492bc423cc687144bb459
|
47eea098ec735b2173ff0d4e5c493cb8f04e705d
|
refs/heads/master
| 2020-12-30T15:54:17.843982 | 2017-05-14T07:32:01 | 2017-05-14T07:32:01 | 91,180,709 | 2 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,221 |
py
|
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: type_Params.py
from types import *
import array
# Bit flag for CreateParams.flags — presumably marks the created file as
# permanent (inferred from the name only; confirm against the protocol).
PARAMS_CREATE_FLAG_PERMANENT = 1
class CreateParams:
    """Parameters of a remote file "create" request (decompiled tasking code).

    NOTE(review): the MSG_KEY_PARAMS_CREATE_* constants used below are not
    defined in this chunk — presumably lost in decompilation; confirm they
    are provided elsewhere before relying on Marshal/Demarshal.
    """

    def __init__(self):
        # Attributes are stored in __dict__ directly so the custom
        # __getattr__/__setattr__ below can restrict the attribute set.
        self.__dict__['flags'] = 0
        self.__dict__['writeOffset'] = 0
        self.__dict__['filePath'] = ''
        self.__dict__['provider'] = 0

    def __getattr__(self, name):
        # Whitelisted reads only; anything else raises AttributeError.
        if name == 'flags':
            return self.__dict__['flags']
        if name == 'writeOffset':
            return self.__dict__['writeOffset']
        if name == 'filePath':
            return self.__dict__['filePath']
        if name == 'provider':
            return self.__dict__['provider']
        raise AttributeError("Attribute '%s' not found" % name)

    def __setattr__(self, name, value):
        # Whitelisted writes only; anything else raises AttributeError.
        if name == 'flags':
            self.__dict__['flags'] = value
        elif name == 'writeOffset':
            self.__dict__['writeOffset'] = value
        elif name == 'filePath':
            self.__dict__['filePath'] = value
        elif name == 'provider':
            self.__dict__['provider'] = value
        else:
            raise AttributeError("Attribute '%s' not found" % name)

    def Marshal(self, mmsg):
        """Serialize this object into *mmsg* as a nested sub-message."""
        from mcl.object.Message import MarshalMessage
        submsg = MarshalMessage()
        submsg.AddU16(MSG_KEY_PARAMS_CREATE_FLAGS, self.__dict__['flags'])
        submsg.AddU64(MSG_KEY_PARAMS_CREATE_WRITE_OFFSET, self.__dict__['writeOffset'])
        submsg.AddStringUtf8(MSG_KEY_PARAMS_CREATE_FILE_PATH, self.__dict__['filePath'])
        submsg.AddU32(MSG_KEY_PARAMS_CREATE_PROVIDER, self.__dict__['provider'])
        mmsg.AddMessage(MSG_KEY_PARAMS_CREATE, submsg)

    def Demarshal(self, dmsg, instance=-1):
        """Populate this object from *dmsg*; writeOffset/provider are optional."""
        import mcl.object.Message
        msgData = dmsg.FindData(MSG_KEY_PARAMS_CREATE, mcl.object.Message.MSG_TYPE_MSG, instance)
        submsg = mcl.object.Message.DemarshalMessage(msgData)
        self.__dict__['flags'] = submsg.FindU16(MSG_KEY_PARAMS_CREATE_FLAGS)
        try:
            self.__dict__['writeOffset'] = submsg.FindU64(MSG_KEY_PARAMS_CREATE_WRITE_OFFSET)
        except:  # noqa: E722 — decompiled code; the field is optional
            pass
        self.__dict__['filePath'] = submsg.FindString(MSG_KEY_PARAMS_CREATE_FILE_PATH)
        try:
            self.__dict__['provider'] = submsg.FindU32(MSG_KEY_PARAMS_CREATE_PROVIDER)
        except:  # noqa: E722 — decompiled code; the field is optional
            pass
class WriteParams:
    """Parameters of one chunked "write" request (decompiled tasking code).

    NOTE(review): the MSG_KEY_PARAMS_WRITE_* constants used below are not
    defined in this chunk — presumably lost in decompilation; confirm they
    are provided elsewhere before relying on Marshal/Demarshal.
    """

    def __init__(self):
        # Stored via __dict__ so the attribute-whitelisting overrides apply.
        self.__dict__['lastData'] = False
        self.__dict__['chunkIndex'] = 0
        self.__dict__['data'] = array.array('B')

    def __getattr__(self, name):
        # Whitelisted reads only; anything else raises AttributeError.
        if name == 'lastData':
            return self.__dict__['lastData']
        if name == 'chunkIndex':
            return self.__dict__['chunkIndex']
        if name == 'data':
            return self.__dict__['data']
        raise AttributeError("Attribute '%s' not found" % name)

    def __setattr__(self, name, value):
        # Whitelisted writes only; anything else raises AttributeError.
        if name == 'lastData':
            self.__dict__['lastData'] = value
        elif name == 'chunkIndex':
            self.__dict__['chunkIndex'] = value
        elif name == 'data':
            self.__dict__['data'] = value
        else:
            raise AttributeError("Attribute '%s' not found" % name)

    def Marshal(self, mmsg):
        """Serialize this chunk into *mmsg* as a nested sub-message."""
        from mcl.object.Message import MarshalMessage
        submsg = MarshalMessage()
        submsg.AddBool(MSG_KEY_PARAMS_WRITE_LAST_DATA, self.__dict__['lastData'])
        submsg.AddU32(MSG_KEY_PARAMS_WRITE_CHUNK_INDEX, self.__dict__['chunkIndex'])
        submsg.AddData(MSG_KEY_PARAMS_WRITE_DATA, self.__dict__['data'])
        mmsg.AddMessage(MSG_KEY_PARAMS_WRITE, submsg)

    def Demarshal(self, dmsg, instance=-1):
        """Populate this chunk from *dmsg*; all three fields are mandatory."""
        import mcl.object.Message
        msgData = dmsg.FindData(MSG_KEY_PARAMS_WRITE, mcl.object.Message.MSG_TYPE_MSG, instance)
        submsg = mcl.object.Message.DemarshalMessage(msgData)
        self.__dict__['lastData'] = submsg.FindBool(MSG_KEY_PARAMS_WRITE_LAST_DATA)
        self.__dict__['chunkIndex'] = submsg.FindU32(MSG_KEY_PARAMS_WRITE_CHUNK_INDEX)
        self.__dict__['data'] = submsg.FindData(MSG_KEY_PARAMS_WRITE_DATA)
|
[
"[email protected]"
] | |
13915155f7c20e488e358ce9a8fc7c78b8049d80
|
299fe2ca879e509798e95c00b7ba33914031f4a7
|
/eruditio/shared_apps/django_userhistory/userhistory.py
|
10aad86e7ff44123a9ea653ae8ca81813915a013
|
[
"MIT"
] |
permissive
|
genghisu/eruditio
|
dcf2390c98d5d1a7c1044a9221bf319cb7d1f0f6
|
5f8f3b682ac28fd3f464e7a993c3988c1a49eb02
|
refs/heads/master
| 2021-01-10T11:15:28.230527 | 2010-04-23T21:13:01 | 2010-04-23T21:13:01 | 50,865,100 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,733 |
py
|
from django_userhistory.models import UserTrackedContent
class UserHistoryRegistry(object):
    """
    Registry for UserHistory handlers. Necessary so that only one
    receiver is registered for each UserTrackedContent object.

    Handlers are resolved by naming convention from
    django_userhistory.handlers (see get_handler).
    """

    def __init__(self):
        # content_name -> ContentType / handler instance
        self._registry = {}
        self._handlers = {}
        # NOTE(review): this queries the database at import time via the
        # module-level singleton below — confirm that is intentional.
        for content in UserTrackedContent.objects.all():
            self.register(content.content_type, content.action)

    def get_handler(self, content_name):
        """
        Return the handler class for *content_name*.

        Convention: the db_table name converted to StudlyCaps + "Handler",
        looked up in django_userhistory.handlers; falls back to
        BaseUserHistoryHandler when no such class exists.
        """
        import django_userhistory.handlers as handlers

        def to_studly(x):
            # e.g. "user_tracked_content" -> "UserTrackedContent"
            return "".join(token.capitalize() for token in x.split("_"))

        return getattr(handlers,
                       "%sHandler" % to_studly(content_name),
                       handlers.BaseUserHistoryHandler)

    def register(self, content_type, action):
        """
        Register a handler for *content_type* unless one is already present.
        """
        content_name = content_type.model_class()._meta.db_table
        # Idiom fix: membership test on the dict itself, not .keys().
        if content_name not in self._registry:
            HandlerClass = self.get_handler(content_name)
            self._registry[content_name] = content_type
            self._handlers[content_name] = HandlerClass(content_type, action)
# Module-level singleton; importing this module performs the registration.
user_history_registry = UserHistoryRegistry()
|
[
"genghisu@6a795458-236b-11df-a5e4-cb4ff25536bb"
] |
genghisu@6a795458-236b-11df-a5e4-cb4ff25536bb
|
e589e9b8d3a9feebdb918b5bc6c69646e2a2bba0
|
911d3ffa7f6687b7b2d5609f4d7bb1f907f1703a
|
/Conditional Statements - More Exercises/06. Pets.py
|
d518cc7b89d6286ab3fc57f9402ad4d4aa37db01
|
[] |
no_license
|
ivan-yosifov88/python_basics
|
923e5ba5dcdc5f2288f012eeb544176d1eb964e9
|
ee02f1b7566e49566f15c4285d92b04f8fa6a986
|
refs/heads/master
| 2023-03-05T21:49:24.191904 | 2021-02-24T12:36:03 | 2021-02-24T12:36:03 | 341,581,532 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 459 |
py
|
from math import floor, ceil
# Read the planning horizon, the food in stock (kg) and the daily rations.
number_of_days = int(input())
left_food = int(input())
dog_food = float(input())
cat_food = float(input())
turtle_food = float(input())
# Turtle ration is in grams, hence /1000.  NOTE(review): by precedence only
# the turtle term is divided — confirm that matches the exercise statement.
kilogram_food_eaten = number_of_days * (dog_food + cat_food + turtle_food / 1000)
difference = abs(left_food - kilogram_food_eaten)
# Surplus rounds down; shortfall rounds up (must buy whole kilos).
if left_food >= kilogram_food_eaten:
    print(f"{floor(difference)} kilos of food left.")
else:
    print(f"{ceil(difference)} more kilos of food are needed.")
|
[
"ivan.yosifov88gmail.com"
] |
ivan.yosifov88gmail.com
|
3f35f2a8b17f35df510599c29d815a6b083efd36
|
ff5892487c262ce845a9996a282d3a2fdb1a3b15
|
/URI_1254.py
|
17a978a92191caec16353d8fd8ca9417daec8b41
|
[] |
no_license
|
dankoga/URIOnlineJudge--Python-3.9
|
d424a47671f106d665a4e255382fc0ec3059096a
|
f1c99521caeff59be0843af5f63a74013b63f7f0
|
refs/heads/master
| 2023-07-15T08:32:11.040426 | 2021-09-03T13:27:17 | 2021-09-03T13:27:17 | 393,991,461 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 698 |
py
|
import re
# For each test case: replace every occurrence of `tag` (case-insensitive)
# with `tag_replacement`, but only inside <...> markup spans of `text`;
# plain text between tags is copied through unchanged.
while True:
    try:
        tag = input().lower()
    except EOFError:
        break  # no more test cases
    tag_replacement = input()
    text = input()
    text_replaced = []
    index_begin = 0
    index_end = 0
    # Compiled once per test case; IGNORECASE makes the match case-blind.
    regex = re.compile(tag, re.IGNORECASE)
    while index_end < len(text):
        # Advance to the next '<' and copy the plain-text run verbatim.
        while index_end < len(text) and text[index_end] != '<':
            index_end += 1
        text_replaced += text[index_begin:index_end]
        index_begin = index_end
        # Advance to the matching '>' and apply the replacement inside.
        while index_end < len(text) and text[index_end] != '>':
            index_end += 1
        text_replaced += regex.sub(tag_replacement, text[index_begin:index_end])
        index_begin = index_end
    print(''.join(text_replaced))
|
[
"[email protected]"
] | |
7870f65dc0b7e24d9079a084ded746c988bdb9bb
|
1bd3076902117867ec048210905195ba2aaaaa6b
|
/exercise/leetcode/python_src/by2017_Sep/Leet279.py
|
d70c3fe111c535970d12a2902656ed8da5306c9a
|
[] |
no_license
|
SS4G/AlgorithmTraining
|
d75987929f1f86cd5735bc146e86b76c7747a1ab
|
7a1c3aba65f338f6e11afd2864dabd2b26142b6c
|
refs/heads/master
| 2021-01-17T20:54:31.120884 | 2020-06-03T15:04:10 | 2020-06-03T15:04:10 | 84,150,587 | 2 | 0 | null | 2017-10-19T11:50:38 | 2017-03-07T03:33:04 |
Python
|
UTF-8
|
Python
| false | false | 809 |
py
|
class Solution(object):
    """Top-down memoised DP for LeetCode 279 (minimal perfect squares)."""

    def __init__(self):
        # memo[i] == minimal count of perfect squares summing to i;
        # -1 marks "not yet computed".  Supports n up to 10003.
        self.dpstate = [0, 1, 2, 3] + [-1] * 10000

    def numSquares(self, n):
        """
        :type n: int
        :rtype: int
        """
        return self.dpRecursive(n, self.dpstate)

    def dpRecursive(self, n, stateRecord):
        """Return the minimal square count for *n*, filling the memo table."""
        if stateRecord[n] != -1:
            return stateRecord[n]
        best = 0xffffffff
        root = int(n ** 0.5)
        while root >= 1:
            candidate = self.dpRecursive(n - root * root, stateRecord)
            if candidate < best:
                best = candidate
            root -= 1
        stateRecord[n] = best + 1
        return best + 1
if __name__ == "__main__":
s = Solution()
print(s.numSquares(6405))
|
[
"[email protected]"
] | |
2eae42fa8e4b1dc07aa735f7b8fc312778f409cd
|
4b4df51041551c9a855468ddf1d5004a988f59a2
|
/leetcode_python/Array/rotate-function.py
|
3d952365fd1c669f093f899be1b8236df3d9be1b
|
[] |
no_license
|
yennanliu/CS_basics
|
99b7ad3ef6817f04881d6a1993ec634f81525596
|
035ef08434fa1ca781a6fb2f9eed3538b7d20c02
|
refs/heads/master
| 2023-09-03T13:42:26.611712 | 2023-09-03T12:46:08 | 2023-09-03T12:46:08 | 66,194,791 | 64 | 40 | null | 2022-08-20T09:44:48 | 2016-08-21T11:11:35 |
Python
|
UTF-8
|
Python
| false | false | 4,546 |
py
|
"""
396. Rotate Function
Medium
You are given an integer array nums of length n.
Assume arrk to be an array obtained by rotating nums by k positions clock-wise. We define the rotation function F on nums as follow:
F(k) = 0 * arrk[0] + 1 * arrk[1] + ... + (n - 1) * arrk[n - 1].
Return the maximum value of F(0), F(1), ..., F(n-1).
The test cases are generated so that the answer fits in a 32-bit integer.
Example 1:
Input: nums = [4,3,2,6]
Output: 26
Explanation:
F(0) = (0 * 4) + (1 * 3) + (2 * 2) + (3 * 6) = 0 + 3 + 4 + 18 = 25
F(1) = (0 * 6) + (1 * 4) + (2 * 3) + (3 * 2) = 0 + 4 + 6 + 6 = 16
F(2) = (0 * 2) + (1 * 6) + (2 * 4) + (3 * 3) = 0 + 6 + 8 + 9 = 23
F(3) = (0 * 3) + (1 * 2) + (2 * 6) + (3 * 4) = 0 + 2 + 12 + 12 = 26
So the maximum value of F(0), F(1), F(2), F(3) is F(3) = 26.
Example 2:
Input: nums = [100]
Output: 0
Constraints:
n == nums.length
1 <= n <= 105
-100 <= nums[i] <= 100
"""
# V0
# IDEA : MATH
# first, we represent the F(1) op as below:
#
# F(0) = 0A + 1B + 2C +3D
#
# F(1) = 0D + 1A + 2B +3C
#
# F(2) = 0C + 1D + 2A +3B
#
# F(3) = 0B + 1C + 2D +3A
#
# then, by some math manipulation, we have below relation:
#
# set sum = 1A + 1B + 1C + 1D
#
# -> F(1) = F(0) + sum - 4D
#
# -> F(2) = F(1) + sum - 4C
#
# -> F(3) = F(2) + sum - 4B
#
# so we find the rules!
#
# => F(i) = F(i-1) + sum - n*A[n-i]
#
# https://www.cnblogs.com/grandyang/p/5869791.html
# http://bookshadow.com/weblog/2016/09/11/leetcode-rotate-function/
class Solution(object):
    def maxRotateFunction(self, A):
        """Return max F(k) over all rotations of A.

        Rolling update: F(i) = F(i-1) + sum(A) - n * A[n-i], so each
        rotation is scored in O(1) after computing F(0) once.
        """
        n = len(A)
        total = sum(A)
        current = sum(i * value for i, value in enumerate(A))  # F(0)
        best = current
        for i in reversed(range(1, n)):
            current += total - n * A[i]
            if current > best:
                best = current
        return best
# V0'
# IDEA : BRUTE FORCE (TLE)
class Solution(object):
    def maxRotateFunction(self, nums):
        """Brute force: rotate nums n times and score each rotation (O(n^2))."""
        # edge case
        if not nums:
            return 0

        def score(arr):
            # F(k) = sum(i * arr[i])
            acc = 0
            for index, value in enumerate(arr):
                acc += index * value
            return acc

        best = -float('inf')
        for _ in range(len(nums)):
            # Rotate clockwise by one (in place, as the original did).
            nums.insert(0, nums.pop(-1))
            best = max(best, score(nums))
        return best
# V1
# https://blog.csdn.net/fuxuemingzhu/article/details/83002609
# IDEA : MATH PATTERN
# -> SINCE
# F(0) = 0A + 1B + 2C +3D
# F(1) = 0D + 1A + 2B +3C
# F(2) = 0C + 1D + 2A +3B
# F(3) = 0B + 1C + 2D +3A
# -> SO
# F(1) = F(0) + sum - 4D
# F(2) = F(1) + sum - 4C
# F(3) = F(2) + sum - 4B
# -> THEN WE KNOW THE PATTERN OF ROTATE OPERATION IS ACTUAL :
# ---> F(i) = F(i-1) + sum - n * A[n-i]
class Solution:
    def maxRotateFunction(self, A):
        """
        :type A: List[int]
        :rtype: int

        Same rolling-update math as above: F(i) = F(i-1) + sum - n*A[n-i].
        """
        n = len(A)
        total = 0
        f = 0  # F(0)
        for idx, val in enumerate(A):
            total += val
            f += idx * val
        best = f
        idx = n - 1
        while idx > 0:
            f += total - n * A[idx]
            # Track the max over F(0) .. F(n-1).
            best = max(best, f)
            idx -= 1
        return best
### Test case
# Sanity checks run at import time against the most recently defined
# Solution class above (V1 at this point in the file).
s=Solution()
assert s.maxRotateFunction([]) == 0
assert s.maxRotateFunction([7]) == 0
assert s.maxRotateFunction([7,2,1]) == 15
assert s.maxRotateFunction([4, 3, 2, 6]) == 26
assert s.maxRotateFunction([0,0,0,0]) == 0
assert s.maxRotateFunction([3,7,0,1]) == 28
assert s.maxRotateFunction([1,1,1,1]) == 6
assert s.maxRotateFunction([-1,-1,-1,-1]) == -6
assert s.maxRotateFunction([-1,10,-5,1]) == 29
# V1'
# http://bookshadow.com/weblog/2016/09/11/leetcode-rotate-function/
# Same rolling-sum solution as V0 above, kept as originally posted.
class Solution(object):
    def maxRotateFunction(self, A):
        """
        :type A: List[int]
        :rtype: int
        """
        size = len(A)
        sums = sum(A)
        # F(0): x is the index, n is the value here.
        sumn = sum(x * n for x, n in enumerate(A))
        ans = sumn
        # F(i) = F(i-1) + sum - size * A[size-i], scanning right-to-left.
        for x in range(size - 1, 0, -1):
            sumn += sums - size * A[x]
            ans = max(ans, sumn)
        return ans
# V2
# Time: O(n)
# Space: O(1)
class Solution(object):
    def maxRotateFunction(self, A):
        """
        :type A: List[int]
        :rtype: int

        O(n) time, O(1) extra space; rolling update over all rotations.
        """
        total = sum(A)
        f = sum(i * a for i, a in enumerate(A))  # F(0)
        best = f
        n = len(A)
        for i in range(1, n + 1):
            # A[-i] walks the array from the tail; the last step (i == n)
            # returns f to F(0) and cannot change the maximum.
            f += total - n * A[-i]
            best = max(best, f)
        return best
|
[
"[email protected]"
] | |
e6ac0a4377f1efeaa6ced9a1f60ff1064ee4f9d5
|
48894ae68f0234e263d325470178d67ab313c73e
|
/sa/apps/mrt/views.py
|
2436dec1eafdb325d310c8be9f817091229bae4b
|
[
"BSD-3-Clause"
] |
permissive
|
DreamerDDL/noc
|
7f949f55bb2c02c15ac2cc46bc62d957aee43a86
|
2ab0ab7718bb7116da2c3953efd466757e11d9ce
|
refs/heads/master
| 2021-05-10T18:22:53.678588 | 2015-06-29T12:28:20 | 2015-06-29T12:28:20 | 118,628,133 | 0 | 0 | null | 2018-01-23T15:19:51 | 2018-01-23T15:19:51 | null |
UTF-8
|
Python
| false | false | 4,323 |
py
|
# -*- coding: utf-8 -*-
##----------------------------------------------------------------------
## sa.mrt application
##----------------------------------------------------------------------
## Copyright (C) 2007-2011 The NOC Project
## See LICENSE for details
##----------------------------------------------------------------------
## Python modules
import datetime
## NOC modules
from noc.lib.app import ExtApplication, view
from noc.sa.models import (ManagedObjectSelector, ManagedObject,
ReduceTask, MRTConfig)
from noc.main.models import Permission
from noc.lib.serialize import json_decode
class MRTAppplication(ExtApplication):
    """
    sa.mrt application

    Launches map/reduce tasks defined by active MRTConfig records and
    exposes their status/results over the JSON API.  (Python 2 code;
    the "Appplication" spelling is kept — renaming would break imports.)
    """
    def extra_permissions(self):
        """
        Get list of additional permissions

        One "sa:mrt:<permission_name>" entry per active MRTConfig.
        :return: list of unique permission names
        """
        x = set([p.permission_name for p in
                 MRTConfig.objects.filter(is_active=True)])
        return list(x)

    @view(url="^(?P<task>[0-9a-zA-Z_\-]+)/$", method=["POST"],
          access="launch", api=True)
    def api_run(self, request, task):
        """
        Run new MRT
        :param request: HTTP request with a JSON body:
            {"selector": <expr>, "map_args": {...}, "timeout": <sec>}
        :param task: MRTConfig name from the URL
        :return: 202 Accepted with a Location to poll, or an error response
        """
        # Get task
        config = MRTConfig.objects.filter(
            name=task, is_active=True).first()
        if not config:
            return self.response_not_found("Task not found")
        # Check permissions
        pn = "sa:mrt:%s" % config.permission_name
        if not Permission.has_perm(request.user, pn):
            return self.response_forbidden(
                "Permission denied: '%s' permission required" % pn)
        # Parse request (raw body is JSON; Python 2 except syntax)
        try:
            r = json_decode(request.raw_post_data)
        except Exception, why:
            return self.response_bad_request(str(why))
        if type(r) != dict:
            return self.response_bad_request("dict required")
        if "selector" not in r:
            return self.response_bad_request("'selector' is missed")
        # Resolve objects from selector
        try:
            objects = ManagedObjectSelector.resolve_expression(r["selector"])
        except ManagedObjectSelector.DoesNotExist, why:
            return self.response_not_found(str(why))
        except ManagedObject.DoesNotExist, why:
            return self.response_not_found(str(why))
        # Check all objects fall within MRTConfig selector
        unauthorized = set(objects).difference(set(
            config.selector.managed_objects))
        if unauthorized:
            return self.response_forbidden("Unauthorized objects: %s" % (
                ", ".join([o.name for o in unauthorized])
            ))
        # Run MRT; an explicit request timeout wins over the config default
        timeout = r.get("timeout", None) or config.timeout
        t = ReduceTask.create_task(
            objects,
            "pyrule:%s" % config.reduce_pyrule.name, {},
            config.map_script, r.get("map_args", {}),
            timeout)
        # Client polls api_result at this location
        return self.response_accepted(
            location="/sa/mrt/%s/%d/" % (task, t.id))

    @view(url="^(?P<task>[0-9a-zA-Z_\-]+)/(?P<task_id>\d+)/$", method=["GET"],
          access="launch", api=True)
    def api_result(self, request, task, task_id):
        """
        Poll a previously launched task.

        :return: dict with keys ready/progress/max_timeout/result
        """
        # Get task
        config = MRTConfig.objects.filter(name=task, is_active=True).first()
        if not config:
            return self.response_not_found("Task not found")
        # Check permissions
        pn = "sa:mrt:%s" % config.permission_name
        if not Permission.has_perm(request.user, pn):
            return self.response_forbidden(
                "Permission denied: '%s' permission required" % pn)
        #
        t = self.get_object_or_404(ReduceTask, id=int(task_id))
        try:
            r = t.get_result(block=False)
        except ReduceTask.NotReady:
            # Not ready: report progress as % of completed map tasks
            # ("C"ompleted or "F"ailed states count as done).
            completed = t.maptask_set.filter(status__in=("C", "F")).count()
            total = t.maptask_set.count()
            return {
                "ready": False,
                "progress": int(completed * 100 / total),
                "max_timeout": (t.stop_time - datetime.datetime.now()).seconds,
                "result": None
            }
        # Return result
        return {
            "ready": True,
            "progress": 100,
            "max_timeout": 0,
            "result": r
        }
|
[
"[email protected]"
] | |
398e431632ab1e171a30c473667a6229cbf94728
|
76b983258793d294b81791ebe72591bfebf78625
|
/lib/ia32/optable.py
|
314689b3d1c369c472d481e50573e58dabea9a73
|
[
"BSD-2-Clause"
] |
permissive
|
lotusexpeditor/syringe
|
18ac9cb800a7fefb7d67e31936db6a84e47df9eb
|
34a8386b90f534f9a856d0a436bba04dbf5100bd
|
refs/heads/master
| 2023-02-08T10:08:20.295797 | 2020-12-27T00:06:09 | 2020-12-27T00:06:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,372 |
py
|
from ._optable import OperandLookupTable
from . import typesize
def Lookup(opcode):
    '''Lookup specified opcode in the lookup table.

    A leading 0x0f escape byte selects the two-byte opcode page, stored in
    the second half (offset 0x100) of OperandLookupTable.
    '''
    first = ord(opcode[0])
    if first != 0x0f:
        return OperandLookupTable[first]
    return OperandLookupTable[ord(opcode[1]) + 0x100]
def HasModrm(lookup):
    '''Returns True if specified opcode requires a modrm byte (bit 7 set).'''
    return (ord(lookup) & 0x80) != 0
def HasImmediate(lookup):
    '''Returns True if specified opcode contains an immediate value (bit 6 set).'''
    return (ord(lookup) & 0x40) != 0
def GetImmediateLength(lookup, prefixes):
    '''Return the length (in bytes) of the opcode's immediate operand.

    `lookup` is the operand-lookup byte for the opcode; `prefixes` is the
    byte string of instruction prefixes.  A 0x66 operand-size prefix
    selects the 16-bit column of the table below, otherwise the 32-bit
    column applies.  Any size-field value outside the special encodings
    is the literal byte count itself.

    (Implements the original author's inline TODO: the if/elif chain is
    replaced with a dict of (16-bit, 32-bit) size pairs.)
    '''
    res = ord(lookup) & 0x3f
    # False (0) -> prefix present -> 16-bit column; True (1) -> 32-bit column.
    opsizeindex = not int(b'\x66' in prefixes)
    special = {
        0x3f: (2*typesize.halfword, 2*typesize.word),
        0x3e: (typesize.byte, typesize.halfword),
        0x3d: (typesize.halfword, typesize.word),
        0x3c: (typesize.word, typesize.word*2),
        0x3b: (typesize.word*2, typesize.halfword),
        0x3a: (typesize.halfword + typesize.word, typesize.word),
    }
    if res in special:
        return special[res][opsizeindex]
    # Plain encoding: the field value is the immediate length directly.
    return res
|
[
"[email protected]"
] | |
418128e933eadf203bb45c157fb1159c2f0fd3fc
|
04c21e01c7dd002d0d66f26f17294bbe25ab30c1
|
/src/core/serializers/authentication/reset_password.py
|
0e0b5a6f90c5a3f1927d314da4b45df747402d19
|
[] |
no_license
|
unbrokenguy/Q-n-A-rest-api
|
29d1a7614d761bf68f38bbbbbd731c3692afccf7
|
dd483993e304d6660c8c8f7518bf7414efd8ec28
|
refs/heads/master
| 2023-06-03T20:19:52.606677 | 2021-06-18T09:35:27 | 2021-06-18T09:35:27 | 376,749,787 | 0 | 0 | null | 2021-06-18T09:35:27 | 2021-06-14T08:08:44 |
Python
|
UTF-8
|
Python
| false | false | 394 |
py
|
from rest_framework import serializers
from core.models import User
class ResetPasswordSerializer(serializers.ModelSerializer):
    """
    Reset password serializer check if new password is strong enough if not raises ValidationError.

    NOTE(review): no strength validator is visible in this class, so the
    ValidationError presumably comes from the ``User`` model/field layer --
    confirm against core.models.User.
    """

    class Meta:
        model = User
        # Only the password is accepted/updated by this serializer.
        fields = ["password"]
        extra_kwargs = {
            "password": {"required": True},
        }
|
[
"[email protected]"
] | |
7718f80d703242913200b6318bd12354622ff8e1
|
c1bd12405d244c5924a4b069286cd9baf2c63895
|
/azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/__init__.py
|
b9ace78ff9b74043c8e8e5253b7611a5e4bd9da3
|
[
"MIT"
] |
permissive
|
lmazuel/azure-sdk-for-python
|
972708ad5902778004680b142874582a284a8a7c
|
b40e0e36cc00a82b7f8ca2fa599b1928240c98b5
|
refs/heads/master
| 2022-08-16T02:32:14.070707 | 2018-03-29T17:16:15 | 2018-03-29T17:16:15 | 21,287,134 | 1 | 3 |
MIT
| 2019-10-25T15:56:00 | 2014-06-27T19:40:56 |
Python
|
UTF-8
|
Python
| false | false | 952 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .linked_services_operations import LinkedServicesOperations
from .data_sources_operations import DataSourcesOperations
from .workspaces_operations import WorkspacesOperations
from .storage_insights_operations import StorageInsightsOperations
from .saved_searches_operations import SavedSearchesOperations
__all__ = [
'LinkedServicesOperations',
'DataSourcesOperations',
'WorkspacesOperations',
'StorageInsightsOperations',
'SavedSearchesOperations',
]
|
[
"[email protected]"
] | |
234f603a62fbcfc25412c15d4df79e54e6129073
|
60f95eff7c43f788af2420813c371152c1e2e5eb
|
/hulk/broker/oanda/common/constants.py
|
b37ffd670df513efa144a0da60298fba8d27b29e
|
[
"BSD-3-Clause"
] |
permissive
|
webclinic017/hulk
|
1667c508acb061a8120dc415978a72e83dc38f54
|
de326ca1554dc743e225cef4e4b1e2fd4f5090c6
|
refs/heads/master
| 2022-03-22T20:07:23.276317 | 2019-12-02T01:10:43 | 2019-12-02T01:11:39 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 329 |
py
|
from ....base.models import AccountType
# Hostnames of OANDA's endpoints, keyed first by endpoint flavour
# ("streaming" vs plain REST "api") and then by account type
# (real/live vs demo/practice).
OANDA_ENVIRONMENTS = {
    "streaming": {
        AccountType.REAL: "stream-fxtrade.oanda.com",
        AccountType.DEMO: "stream-fxpractice.oanda.com",
    },
    "api": {
        AccountType.REAL: "api-fxtrade.oanda.com",
        AccountType.DEMO: "api-fxpractice.oanda.com",
    }
}
|
[
"[email protected]"
] | |
4e86e0e6ff825aaff5a9add1e218622ecce984ed
|
ca75f7099b93d8083d5b2e9c6db2e8821e63f83b
|
/z2/part2/batch/jm/parser_errors_2/185179947.py
|
546d93745ac3129f50e6b4ee8ebd53a7475e4971
|
[
"MIT"
] |
permissive
|
kozakusek/ipp-2020-testy
|
210ed201eaea3c86933266bd57ee284c9fbc1b96
|
09aa008fa53d159672cc7cbf969a6b237e15a7b8
|
refs/heads/master
| 2022-10-04T18:55:37.875713 | 2020-06-09T21:15:37 | 2020-06-09T21:15:37 | 262,290,632 | 0 | 0 |
MIT
| 2020-06-09T21:15:38 | 2020-05-08T10:10:47 |
C
|
UTF-8
|
Python
| false | false | 916 |
py
|
from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 185179947
"""
"""
random actions, total chaos
"""
board = gamma_new(2, 2, 2, 2)
assert board is not None
assert gamma_move(board, 1, 1, 0) == 1
assert gamma_move(board, 2, 0, 1) == 1
board737265096 = gamma_board(board)
assert board737265096 is not None
assert board737265096 == ("2.\n"
".1\n")
del board737265096
board737265096 = None
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 1, 1) == 1
assert gamma_move(board, 1, 0, 1) == 0
assert gamma_move(board, 2, 0, 0) == 1
assert gamma_free_fields(board, 2) == 0
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_golden_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 1, 1) == 0
gamma_delete(board)
|
[
"[email protected]"
] | |
ba97b518db15458fb817d0b872d2356510abc92f
|
df8438656cc2b15001a03d02949abec9a374cb6f
|
/test/normalizer_issue_files/E72.py
|
c39cacc09c68bb48fdc7e3972843eaa5190fa3fb
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] |
permissive
|
gandhis1/parso
|
65fcc7540eb2664691b1ed12203faa617995c4ce
|
7b166db0b5b0b46a3b8b2f1ea5c9dcf57bc36197
|
refs/heads/master
| 2021-01-25T04:36:15.558393 | 2017-06-05T23:20:12 | 2017-06-05T23:20:12 | 93,455,487 | 0 | 0 | null | 2017-06-05T23:18:20 | 2017-06-05T23:18:20 | null |
UTF-8
|
Python
| false | false | 1,089 |
py
|
#: E721
if type(res) == type(42):
pass
#: E721
if type(res) != type(""):
pass
import types
if res == types.IntType:
pass
import types
#: E721:3
if type(res) is not types.ListType:
pass
#: E721:7 E721:35
assert type(res) == type(False) or type(res) == type(None)
#: E721:7
assert type(res) == type([])
#: E721:7
assert type(res) == type(())
#: E721:7
assert type(res) == type((0,))
#: E721:7
assert type(res) == type((0))
#: E721:7
assert type(res) != type((1, ))
#: E721:7
assert type(res) is type((1, ))
#: E721:7
assert type(res) is not type((1, ))
# Okay
#: E402
import types
if isinstance(res, int):
pass
if isinstance(res, str):
pass
if isinstance(res, types.MethodType):
pass
#: E721:3 E721:25
if type(a) != type(b) or type(a) == type(ccc):
pass
#: E721
type(a) != type(b)
#: E721
1 != type(b)
#: E721
type(b) != 1
1 != 1
try:
pass
#: E722
except:
pass
try:
pass
except Exception:
pass
#: E722
except:
pass
# Okay
fake_code = """"
try:
do_something()
except:
pass
"""
try:
pass
except Exception:
pass
|
[
"[email protected]"
] | |
6a18f0ae5be54533a66e3eca6087ba0b206673dc
|
781e2692049e87a4256320c76e82a19be257a05d
|
/all_data/exercism_data/python/saddle-points/3a5b8c06b75443c1ba4f3e45cd0ac791.py
|
bc8cf755d9ace6afd58ad1d6751c8c089df1218d
|
[] |
no_license
|
itsolutionscorp/AutoStyle-Clustering
|
54bde86fe6dbad35b568b38cfcb14c5ffaab51b0
|
be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
|
refs/heads/master
| 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null |
UTF-8
|
Python
| false | false | 496 |
py
|
def saddle_points(mat):
    """Return the set of (row, col) saddle points of matrix *mat*.

    A saddle point holds a value that is the maximum of its row and the
    minimum of its column.  Raises ValueError for a ragged matrix; an
    empty matrix yields an empty set.
    """
    if not mat:
        return set()
    width = len(mat[0])
    if any(len(row) != width for row in mat):
        raise ValueError("Irregular matrix. All rows must be the same length.")
    # Precompute per-row maxima and per-column minima once; the original
    # recomputed max(row) and the column min for every cell (quadratic
    # work per element).
    row_maxes = [max(row) for row in mat]
    col_mins = [min(col) for col in zip(*mat)]
    return {(i, j)
            for i, row in enumerate(mat)
            for j, value in enumerate(row)
            if value == row_maxes[i] and value == col_mins[j]}
|
[
"[email protected]"
] | |
8a2c478a7c55bd6e17bdb6130aaa087cc8b4487b
|
46035631e6d76ddea73603fcf139290f5cb4a991
|
/aws-python/__main__.py
|
dfb3e5a12cc77cf4fba6391e7fa0f6e30b3084db
|
[
"Apache-2.0"
] |
permissive
|
pulumi/templates
|
c6150fd66d5ba85a312d9ee3102ed456abebda8b
|
7c18d24ed7a4e0f5e00801bc133bb19dae630ee3
|
refs/heads/master
| 2023-08-21T12:46:56.389767 | 2023-08-04T20:36:26 | 2023-08-04T20:36:26 | 124,577,647 | 66 | 52 |
Apache-2.0
| 2023-09-13T00:07:57 | 2018-03-09T18:21:12 |
Go
|
UTF-8
|
Python
| false | false | 219 |
py
|
"""An AWS Python Pulumi program"""
import pulumi
from pulumi_aws import s3
# Create an AWS resource (S3 Bucket)
bucket = s3.Bucket('my-bucket')
# Export the name of the bucket
pulumi.export('bucket_name', bucket.id)
|
[
"[email protected]"
] | |
684fc7ef464c7a993ed509a48263880dc368f563
|
8d79fc03f6e5a6df41e824c8573d3ea4646146bf
|
/IB_Tree_PathSum_if_Exists.py
|
e0c2d820da30e788bdd1a62a83ce8e103a92d034
|
[] |
no_license
|
Cbkhare/Codes
|
3bea294dd0f2ec99e7e0ef0b7ff976cbe1765b7f
|
5b535795cdd742b7810ea163e0868b022736647d
|
refs/heads/master
| 2021-10-24T03:26:54.983073 | 2019-03-21T14:33:41 | 2019-03-21T14:33:41 | 111,226,735 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 970 |
py
|
class Solution:
    # @param A : root node of tree
    # @param B : integer
    # @return an integer
    def hasPathSum(self, A, B):
        """Return 1 if some root-to-leaf path in tree A sums to B, else 0.

        An empty tree has no paths, so it always yields 0.
        """
        if not A: return 0
        def validate(node, some):
            # `some` is the remaining sum still needed at this node.
            #print (node.val, some)
            if node.left == node.right == None:
                # Leaf: the path succeeds iff the leaf consumes the rest.
                if node.val==some:
                    return True
                else:
                    return False
            else:
                if node.left:
                    r = validate(node.left,some=some-node.val)
                    if r:
                        # This is to avoid going to node.right if found true
                        return True
                if node.right:
                    r = validate(node.right, some=some-node.val)
                    if r:
                        return True
                return False
        if validate(A,B):
            return 1
        else:
            return 0
'''
https://www.interviewbit.com/problems/path-sum/
'''
|
[
"[email protected]"
] | |
6fae06163498067858f995086c69e2c86473bfc5
|
9876a02fb4f6c38271e41995296c6da4d2ec84af
|
/wagtail_review/templatetags/wagtailreview_tags.py
|
0f6c7e5ea88157336aa4fe725a39f1f39153a035
|
[
"BSD-3-Clause"
] |
permissive
|
jacobtoppm/wagtail-review
|
423c19cecfa17ddeb22de6bb2a34baad0cd10fdb
|
23b81d7e5699ecb843a99da1aa207775a8b85bd6
|
refs/heads/master
| 2020-12-27T18:26:42.182847 | 2019-05-20T11:29:16 | 2019-05-20T11:29:16 | 238,005,148 | 0 | 0 |
BSD-3-Clause
| 2020-03-13T10:44:00 | 2020-02-03T16:05:08 | null |
UTF-8
|
Python
| false | false | 1,172 |
py
|
from django import template
from wagtail_review.forms import ResponseForm
register = template.Library()
@register.inclusion_tag('wagtail_review/annotate.html', takes_context=True)
def wagtailreview(context):
    """Template tag supplying the review-annotation context for a page.

    Reads the review mode and reviewer that middleware attached to the
    request (absent attributes default to None) and returns the context
    dict for wagtail_review/annotate.html.
    """
    request = context['request']
    review_mode = getattr(request, 'wagtailreview_mode', None)
    reviewer = getattr(request, 'wagtailreview_reviewer', None)

    if review_mode == 'respond' or review_mode == 'comment':
        # Annotations/responses are only writable while the review is open.
        return {
            'mode': review_mode,
            'allow_annotations': (reviewer.review.status != 'closed'),
            'show_closed': (reviewer.review.status == 'closed'),
            'allow_responses': (review_mode == 'respond' and reviewer.review.status != 'closed'),
            'reviewer': reviewer,
            'token': reviewer.response_token,
            'response_form': ResponseForm()
        }
    elif review_mode == 'view':
        # Read-only view: nothing may be added regardless of status.
        return {
            'mode': review_mode,
            'show_closed': False,
            'allow_annotations': False,
            'allow_responses': False,
            'reviewer': reviewer,
            'token': reviewer.view_token
        }
    else:
        return {'mode': None}
|
[
"[email protected]"
] | |
18a16704f66dd1d340db3c65e8ea06fa3b6b70cd
|
59f64b5cf799e31c97b11828dba4787afb8f3f17
|
/hail/python/hail/ggplot/aes.py
|
5497f28d4d22e7863d89af491b89520fe20e5f4b
|
[
"MIT"
] |
permissive
|
hail-is/hail
|
2089e6f3b38548f13fa5c2a8ab67f5cfdd67b4f1
|
07a483ae0f46c66f3ed6fd265b48f48c06298f98
|
refs/heads/main
| 2023-09-01T15:03:01.450365 | 2023-09-01T02:46:35 | 2023-09-01T02:46:35 | 45,069,467 | 913 | 262 |
MIT
| 2023-09-14T21:53:32 | 2015-10-27T20:55:42 |
Python
|
UTF-8
|
Python
| false | false | 1,112 |
py
|
from collections.abc import Mapping
from hail.expr import Expression
from hail import literal
class Aesthetic(Mapping):
    """Read-only mapping from aesthetic names to their mapped expressions."""

    def __init__(self, properties):
        self.properties = properties

    def __getitem__(self, item):
        return self.properties[item]

    def __len__(self):
        return len(self.properties)

    def __contains__(self, item):
        return item in self.properties

    def __iter__(self):
        return iter(self.properties)

    def __repr__(self):
        return repr(self.properties)

    def merge(self, other):
        # Entries from `other` win on key collisions.
        combined = dict(self.properties)
        combined.update(other.properties)
        return Aesthetic(combined)
def aes(**kwargs):
    """Create an aesthetic mapping

    Parameters
    ----------
    kwargs:
        Map aesthetic names to hail expressions based on table's plot.

    Returns
    -------
    :class:`.Aesthetic`
        The aesthetic mapping to be applied.
    """
    hail_field_properties = {}

    # Promote plain Python values to Hail expressions so every aesthetic
    # is uniformly an Expression downstream.
    for k, v in kwargs.items():
        if not isinstance(v, Expression):
            v = literal(v)
        hail_field_properties[k] = v

    return Aesthetic(hail_field_properties)
|
[
"[email protected]"
] | |
cffd05aad6e7ec0a8b97f7e2970e5b764364375f
|
2ac0e1ca51c473bba04bb08ea3be2015063a6a13
|
/galmeko/hospital/models.py
|
99b33f90648b820ca4d8c879fc1956e7d0906004
|
[] |
no_license
|
guarav00009/Gaurav-Pandey-Latest
|
2012aafe643e1fcc915626422e352d1e4411905a
|
aa38a47a46bc434f5ec608fde5eec0f0f58259b9
|
refs/heads/master
| 2020-12-22T10:03:17.325527 | 2020-01-28T13:53:52 | 2020-01-28T13:53:52 | 236,746,358 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,460 |
py
|
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.conf import settings
from django.utils.html import format_html
from django.template.response import TemplateResponse
User = settings.AUTH_USER_MODEL
# Create your models here.
class Hospital(models.Model):
user = models.OneToOneField(User,on_delete=models.CASCADE)
hospital_name = models.CharField(max_length=100,blank=False,null=False)
phone = models.CharField(max_length=15, blank=True, null=True)
registration_no = models.CharField(max_length=30, unique=True)
address = models.CharField(max_length=150,blank=False,null=False)
file = models.ImageField(null=True, blank=True, upload_to="hospital/")
STATUS_CHOICES = (
(0, 'Pending'),
(1, 'Active'),
(2, 'Rejected'),
(3, 'Deleted'),
)
status = models.IntegerField(
_('status'), choices=STATUS_CHOICES, default=0)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = 'Hospital'
verbose_name_plural = 'Hospital'
def __str__(self):
return self.hospital_name
def file_link(self):
if self.file:
return format_html("<a href='%s' download>Download</a>" % (self.file.url,))
else:
return "No attachment"
file_link.allow_tags = True
file_link.short_description = 'Attachment'
|
[
"[email protected]"
] | |
6644f5d39c16c8085f33054bbbdde0e525062265
|
3c2323929499a4d81adada6f60ee64bde1e86cb2
|
/Simple_Backpropagation_Program/pytorch/views.py
|
ad9420f44e05bb17b3ef53f819f0390a0c1d09d5
|
[] |
no_license
|
GeonwooVincentKim/Backpropagation_Pytorch_Django
|
8ba22bb065aca35fed114420b749bb9f0a383688
|
41df659956e5e4e8126b272bd4f5053cdeb30663
|
refs/heads/master
| 2022-11-22T06:44:27.901139 | 2020-07-16T14:01:06 | 2020-07-16T14:01:06 | 273,230,382 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 814 |
py
|
from django.shortcuts import render
# Create your views here.
def index(request):
return render(request, 'index.html', {})
def sub(request):
# if request.method == 'POST':
# """
# Write down some code which is related to
# the number that you input.
# """
# """
# From now you are going to here
# to handle the data, you should make database
# that helps save data users input numbers into this
# Simple BackPropagation Algorithm.
# """
# context = {'form': }
return render(request, "sub/sub.html", {})
# return render(request, 'sub/sub.html', {})
def input(request):
return render(request, "sub/index.html", {})
# context = {'form': InputForm()}
# return render(request, "input/input.html", {})
|
[
"[email protected]"
] | |
68e429904fe72245794c1b21b63e11df67f9ce97
|
cb13037cdbd3e0ab6108670108e9497cc1e2a5a7
|
/0.leetcode/0.基本的算法/4.排序/1.冒泡排序Bubblesort.py
|
87ddbad13767a3782c1a06daaf71a3b8bf67122c
|
[] |
no_license
|
GMwang550146647/network
|
390fe0d1c72dcaca8b6d6dd1307adca0d56b55ce
|
576de9b993f7763789d25a995702b40c9bc6fa57
|
refs/heads/master
| 2023-06-15T04:42:54.306077 | 2021-07-12T06:06:02 | 2021-07-12T06:06:02 | 315,488,828 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 750 |
py
|
'''
1.冒泡排序:
把最大的数一个一个地丢到最前面(期间对比的时候,见到比自己小的就交换相邻两个)
优点:
在非顺序链表都可以用
'''
def bubbleSort(arr):
    """In-place bubble sort; returns the same (now sorted) list."""
    # After each outer pass the largest remaining element has bubbled up
    # to position `last`, so the inner scan shrinks by one each time.
    for last in reversed(range(1, len(arr))):
        for k in range(last):
            if arr[k] > arr[k + 1]:
                arr[k], arr[k + 1] = arr[k + 1], arr[k]
    return arr
def bubbleSortModified(arr):
    """In-place bubble sort that stops early once a pass makes no swaps."""
    for last in reversed(range(1, len(arr))):
        swapped = False
        for k in range(last):
            if arr[k] > arr[k + 1]:
                arr[k], arr[k + 1] = arr[k + 1], arr[k]
                swapped = True
        if not swapped:
            # No adjacent pair was out of order: the list is sorted.
            break
    return arr
arr=[9,8,7,6,5,4,3,2,1]
print(bubbleSort(arr.copy()))
print(bubbleSortModified(arr.copy()))
|
[
"[email protected]"
] | |
349ea6ce098e264d8c03d7b91b59e71dad2c0350
|
d15eb2285895469a452867f76b033d0d64a4af5c
|
/Old_scripts_delete_20220804/Scripts/measurements/vna_autler_townes.py
|
7d51bf373377dba1857bae3f809c5d6dc426d33d
|
[] |
no_license
|
MRitter95/Kollar-Lab
|
45ac62ed7805ad9faeeb33b54be50f39950f3b2c
|
c905725c43af6a49fe5bb2a994d5180f2ba469c2
|
refs/heads/master
| 2023-08-19T03:38:43.761313 | 2023-08-10T17:49:00 | 2023-08-10T17:49:00 | 236,054,959 | 5 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,356 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 8 18:31:45 2020
@author: Kollarlab
"""
import time
import os
import numpy as np
import matplotlib.pyplot as plt
import userfuncs
import plotting_tools as plots
def get_default_settings():
settings = {}
#Save location
settings['scanname'] = 'scanname'
settings['meas_type'] = 'Autler_Townes'
settings['project_dir'] = r'Z:\Data\defaultdir'
#Sweep parameters
settings['CAV_Attenuation'] = 30
settings['Qbit_Attenuation'] = 10
settings['Autler_Attenuation'] = 10
settings['ext_flux'] = 0
settings['autler_power'] = -20
settings['start_autler_freq'] = 3.5e9
settings['stop_autler_freq'] = 4.5e9
settings['autler_points'] = 31
#VNA settings
settings['channel'] = 1
settings['avg_time'] = 30
settings['measurement'] = 'S21'
settings['start_freq'] = 3.5e9
settings['stop_freq'] = 4.5e9
settings['freq_points'] = 501
settings['RFpower'] = -25
settings['RFport'] = 3
settings['Mport'] = 2
settings['CAVport'] = 1
settings['CAVpower'] = -55
settings['CAVfreq'] = 8.12555e9
settings['ifBW'] = 1e3
return settings
def vna_autler_townes(instruments, settings):
#Instruments used
vna = instruments['VNA']
autlergen = instruments['RFsource']
SRS = instruments['SRS']
vna.reset()
#Data saving and naming
saveDir = userfuncs.saveDir(settings['project_dir'], settings['meas_type'])
stamp = userfuncs.timestamp()
filename = settings['scanname'] + '_' + stamp
scanname = settings['scanname']
CAV_Attenuation = settings['CAV_Attenuation']
Qbit_Attenuation = settings['Qbit_Attenuation']
Autler_Attenuation = settings['Autler_Attenuation']
settings['CAVpower'] = settings['CAVpower'] + CAV_Attenuation
settings['RFpower'] = settings['RFpower'] + Qbit_Attenuation
settings['autler_power'] = settings['autler_power'] + Autler_Attenuation
autlergen.power = settings['autler_power']
autlergen.output = 'On'
SRS.output = 'On'
SRS.voltage_ramp(settings['ext_flux'])
start_autler_freq = settings['start_autler_freq']
stop_autler_freq = settings['stop_autler_freq']
autler_points = settings['autler_points']
autler_freqs = np.round(np.linspace(start_autler_freq, stop_autler_freq, autler_points),-3)
findices = np.array(list(range(len(autler_freqs))))
if settings['reverse']:
findices = np.flipud(findices)
if settings['random']:
np.random.shuffle(findices)
mags = np.zeros((len(autler_freqs), settings['freq_points']))
phases = np.zeros((len(autler_freqs), settings['freq_points']))
tstart = time.time()
for freqind in findices:
autler_freq = autler_freqs[freqind]
print('Freq: {}, final freq: {}'.format(autler_freq, autler_freqs[-1]))
autlergen.freq = autler_freq
data = vna.spec_meas(settings)
vna.autoscale()
mags[freqind] = data['mag']
phases[freqind] = data['phase']
if freqind==0:
tstop = time.time()
singlePointTime = tstop-tstart
estimatedTime = singlePointTime*len(autler_freqs)
print(' ')
print('estimated time for this scan : ' + str(np.round(estimatedTime/60, 1)) + ' minutes')
print('estimated time for this scan : ' + str(np.round(estimatedTime/60/60, 2)) + ' hours')
print(' ')
freqs = data['xaxis']
labels = ['Freq (GHz)', 'Autler freq (GHz)']
full_data = {}
single_data = {}
if not settings['random']:
if settings['reverse']:
full_data = {}
full_data['xaxis'] = freqs
full_data['mags'] = mags[freqind:]
full_data['phases'] = phases[freqind:]
single_data = data
yaxis = autler_freqs[freqind:]
else:
full_data = {}
full_data['xaxis'] = freqs
full_data['mags'] = mags[0:freqind+1]
full_data['phases'] = phases[0:freqind+1]
single_data = data
yaxis = autler_freqs[0:freqind+1]
plots.simplescan_plot(full_data, single_data, yaxis, filename, labels, identifier='', fig_num=1)
userfuncs.SaveFull(saveDir, filename, ['full_data', 'single_data', 'autler_freqs', 'labels', 'filename'], locals(), expsettings=settings)
plt.savefig(os.path.join(saveDir, filename+'.png'), dpi = 150)
t2 = time.time()
print('Elapsed time: {}'.format(t2-tstart))
if settings['random']:
full_data = {}
full_data['xaxis'] = freqs
full_data['mags'] = mags
full_data['phases'] = phases
single_data = data
yaxis = autler_freqs
plots.simplescan_plot(full_data, single_data, yaxis, filename, labels, identifier='', fig_num=1)
# SRS.voltage_ramp(0.)
# SRS.output = 'Off'
autlergen.output = 'Off'
userfuncs.SaveFull(saveDir, filename, ['full_data', 'single_data', 'autler_freqs', 'labels', 'filename'], locals(), expsettings=settings)
plt.savefig(os.path.join(saveDir, filename+'.png'), dpi = 150)
|
[
"[email protected]"
] | |
1b70f2c79a348180971c5ae664a3ee3a8482424a
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03378/s251701721.py
|
49de078c7467f51e9581f9eab691c6a075c1561c
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 187 |
py
|
# Competitive-programming script: reads n, m, s and then m integers a[i]
# from stdin.  Counts how many a[i] fall below s ("low") versus at/above s
# ("high") and prints the smaller count.  (n is read but unused here --
# presumably part of the problem's input format.)
n,m,s = [int(x) for x in input().split()]
a = [int(x) for x in input().split()]
low = 0
high = 0
for i in range(m):
    if a[i] < s:
        low += 1
    else:
        high += 1
print(min(low,high))
|
[
"[email protected]"
] | |
333c75b551e4d62e7e80906e1b5ab7e2af0653cc
|
bd28f8a8dbcf7f2b4be3bcc0c0e656009191d379
|
/predict_nn/ranlp/rsr_dev/mi/ian.py
|
58b47a880118a587446b42c4ca6f575d9f0355ea
|
[
"MIT"
] |
permissive
|
nicolay-r/attitudes-extraction-ds
|
e2e5f9218408514ca1f3eff5edf88771e2f368ee
|
49a82843e6adbca35321aaaa08d05532e953a0fc
|
refs/heads/master
| 2022-08-30T04:51:14.133899 | 2020-05-28T11:06:01 | 2020-05-28T11:06:01 | 197,908,649 | 3 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,240 |
py
|
#!/usr/bin/python
import sys
sys.path.append('../../../../')
from predict_nn.ranlp.rsr_dev.config import TEST_ON_EPOCHS, MI_CONTEXTS_PER_OPINION
from networks.ranlp.io_rsr_dev import RaNLPConfTaskRuSentRelWithDevIO
from networks.mimlre.base import MIMLRE
from networks.context.architectures.ian import IAN
from networks.context.configurations.ian import IANConfig
from predict_nn.ranlp.mi_names import ModelNames
from networks.ranlp.model_mimlre import RaNLPConfTaskMIMLREModel
from networks.mimlre.configuration.base import MIMLRESettings
import predict_nn.ranlp.utils as utils
def modify_settings(settings):
assert(isinstance(settings, MIMLRESettings))
settings.modify_contexts_per_opinion(MI_CONTEXTS_PER_OPINION)
if __name__ == "__main__":
utils.run_cv_testing(model_name=ModelNames.MI_IAN,
create_network=lambda: MIMLRE(context_network=IAN()),
create_config=lambda: MIMLRESettings(context_settings=IANConfig()),
create_io=RaNLPConfTaskRuSentRelWithDevIO,
create_model=RaNLPConfTaskMIMLREModel,
modify_settings_callback=modify_settings,
test_on_epochs=TEST_ON_EPOCHS)
|
[
"[email protected]"
] | |
37fcce29634843a7c5c79899d2c6871a27f98257
|
3fb718b33d486d638402e5f5bb4eb028332bd54e
|
/Objects and Classes/Zoo.py
|
c657af3653914ff55c24c427eacb63f1fabf3133
|
[] |
no_license
|
lion963/SoftUni-Python-Fundamentals-
|
1c0aced0d770d0f5d0a4977543e945576425aff1
|
25fca7f88513d9e9b9ceb2741d9cb3b3c067b97b
|
refs/heads/master
| 2023-01-24T16:21:46.517847 | 2020-12-14T13:50:06 | 2020-12-14T13:50:06 | 297,916,630 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,279 |
py
|
class Zoo:
    """Tracks animals of three species groups ('mammal', 'fish', 'bird')."""

    def __init__(self, name):
        self.name = name
        self.mammals = []
        self.fishes = []
        self.birds = []
        # Total count across all species; kept per-instance (the original
        # class-level attribute was shadowed on first increment anyway).
        self.__animals = 0

    def add_animal(self, species, name):
        """Register *name* under *species*; always bumps the total count."""
        if species == 'mammal':
            self.mammals.append(name)
        elif species == 'fish':
            self.fishes.append(name)
        elif species == 'bird':
            self.birds.append(name)
        self.__animals += 1

    def get_info(self, species):
        """Return a summary line listing all animals of *species*.

        Bug fix: the original read the module-level global ``zoo`` instead
        of ``self``, so any instance not bound to that exact global name
        reported the wrong zoo name (or raised NameError).
        """
        if species == 'mammal':
            return f'Mammals in {self.name}: {", ".join(self.mammals)}'
        elif species == 'fish':
            return f'Fishes in {self.name}: {", ".join(self.fishes)}'
        elif species == 'bird':
            return f'Birds in {self.name}: {", ".join(self.birds)}'

    def get_total(self):
        """Return a summary of the total number of animals added."""
        return f'Total animals: {self.__animals}'
zoo_name = input()
zoo = Zoo(zoo_name)
n = int(input())
for _ in range(n):
species, name = input().split(' ')
zoo.add_animal(species, name)
species = input()
print(zoo.get_info(species))
print(zoo.get_total())
|
[
"[email protected]"
] | |
a1e6752c97c13384efca970a958b0761d12d34cd
|
d2189145e7be2c836017bea0d09a473bf1bc5a63
|
/Reposiciones/reposicionesIsraelFP/reposicion31Ago18IsraelFP/fibonacciISraelFP.py
|
692bd0eafb663ca194cd985e7f9b1080a1142875
|
[] |
no_license
|
emilianoNM/Tecnicas3
|
12d10ce8d78803c8d2cd6a721786a68f7ee2809d
|
6ad7f0427ab9e23643a28ac16889bca8791421d0
|
refs/heads/master
| 2020-03-25T18:06:34.126165 | 2018-11-24T04:42:14 | 2018-11-24T04:42:14 | 144,013,045 | 3 | 5 | null | 2018-09-14T10:47:26 | 2018-08-08T12:49:57 |
Python
|
UTF-8
|
Python
| false | false | 306 |
py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 15 16:04:05 2018
@author: israel
"""
def fib(f):
if f == 1: return 1
if f == 2: return 1
return fib(f-1)+fib(f-2)
print "\t..:Fibonacci:.."
f=input("Cantidad de no. a hacer en Fibonacci: ")
print "> No. Fibonacci: ",fib(f)
|
[
"[email protected]"
] | |
40547c88ef4733a7b77c0d92fa0344e3439c408f
|
98efe1aee73bd9fbec640132e6fb2e54ff444904
|
/loldib/getratings/models/NA/na_aatrox/__init__.py
|
edb18cb382f17b02c1036fa9cc09ee67a24a63ab
|
[
"Apache-2.0"
] |
permissive
|
koliupy/loldib
|
be4a1702c26546d6ae1b4a14943a416f73171718
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
refs/heads/master
| 2021-07-04T03:34:43.615423 | 2017-09-21T15:44:10 | 2017-09-21T15:44:10 | 104,359,388 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 150 |
py
|
from .na_aatrox_top import *
from .na_aatrox_jng import *
from .na_aatrox_mid import *
from .na_aatrox_bot import *
from .na_aatrox_sup import *
|
[
"[email protected]"
] | |
390b65607f271bdd88f9fab4359365ad28e4f992
|
d92235bce35d7bf1b028ae417c6ceb8891b6c8b4
|
/dk_mnist_mlp_weightnorm.py
|
10c935941f332df7936c404f15dd57a9d282b466
|
[] |
no_license
|
capybaralet/BayesianHypernet
|
63faadc83aa95ec80e5d7805ec300c151734f93a
|
4d7bdc749b2fb9cf74e45c5b21ccc590b6f781e7
|
refs/heads/master
| 2020-12-30T15:30:54.687925 | 2017-05-15T21:38:15 | 2017-05-15T21:38:15 | 91,155,018 | 3 | 0 | null | 2017-05-13T06:41:49 | 2017-05-13T06:41:49 | null |
UTF-8
|
Python
| false | false | 5,345 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Fri May 12 17:46:38 2017
@author: Chin-Wei
"""
from modules import LinearFlowLayer, IndexLayer, PermuteLayer
from modules import CoupledDenseLayer, stochasticDenseLayer2
from utils import log_normal, log_stdnormal
from ops import load_mnist
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams
srng = RandomStreams(seed=427)
floatX = theano.config.floatX
import lasagne
from lasagne import init
from lasagne import nonlinearities
from lasagne.layers import get_output
from lasagne.objectives import categorical_crossentropy as cc
import numpy as np
if 1:#def main():
"""
MNIST example
weight norm reparameterized MLP with prior on rescaling parameters
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--perdatapoint',action='store_true')
parser.add_argument('--coupling',action='store_true')
parser.add_argument('--lrdecay',action='store_true')
parser.add_argument('--lr0',default=0.1,type=float)
parser.add_argument('--lbda',default=0.5,type=float)
parser.add_argument('--bs',default=32,type=int)
args = parser.parse_args()
print args
perdatapoint = args.perdatapoint
coupling = 1#args.coupling
lr0 = args.lr0
lrdecay = args.lrdecay
lbda = np.cast[floatX](args.lbda)
bs = args.bs
size = max(10,min(50000,args.size))
clip_grad = 100
max_norm = 100
# load dataset
filename = '/data/lisa/data/mnist.pkl.gz'
train_x, train_y, valid_x, valid_y, test_x, test_y = load_mnist(filename)
input_var = T.matrix('input_var')
target_var = T.matrix('target_var')
dataset_size = T.scalar('dataset_size')
lr = T.scalar('lr')
# 784 -> 20 -> 10
weight_shapes = [(784, 200),
(200, 10)]
num_params = sum(ws[1] for ws in weight_shapes)
if perdatapoint:
wd1 = input_var.shape[0]
else:
wd1 = 1
# stochastic hypernet
ep = srng.normal(std=0.01,size=(wd1,num_params),dtype=floatX)
logdets_layers = []
h_layer = lasagne.layers.InputLayer([None,num_params])
layer_temp = LinearFlowLayer(h_layer)
h_layer = IndexLayer(layer_temp,0)
logdets_layers.append(IndexLayer(layer_temp,1))
if coupling:
layer_temp = CoupledDenseLayer(h_layer,200)
h_layer = IndexLayer(layer_temp,0)
logdets_layers.append(IndexLayer(layer_temp,1))
h_layer = PermuteLayer(h_layer,num_params)
layer_temp = CoupledDenseLayer(h_layer,200)
h_layer = IndexLayer(layer_temp,0)
logdets_layers.append(IndexLayer(layer_temp,1))
weights = lasagne.layers.get_output(h_layer,ep)
# primary net
t = np.cast['int32'](0)
layer = lasagne.layers.InputLayer([None,784])
inputs = {layer:input_var}
for ws in weight_shapes:
num_param = ws[1]
w_layer = lasagne.layers.InputLayer((None,ws[1]))
weight = weights[:,t:t+num_param].reshape((wd1,ws[1]))
inputs[w_layer] = weight
layer = stochasticDenseLayer2([layer,w_layer],ws[1])
print layer.output_shape
t += num_param
layer.nonlinearity = nonlinearities.softmax
y = T.clip(get_output(layer,inputs), 0.001, 0.999) # stability
# loss terms
logdets = sum([get_output(logdet,ep) for logdet in logdets_layers])
logqw = - (0.5*(ep**2).sum(1) + 0.5*T.log(2*np.pi)*num_params + logdets)
#logpw = log_normal(weights,0.,-T.log(lbda)).sum(1)
logpw = log_stdnormal(weights).sum(1)
kl = (logqw - logpw).mean()
logpyx = - cc(y,target_var).mean()
loss = - (logpyx - kl/T.cast(dataset_size,floatX))
params = lasagne.layers.get_all_params([h_layer,layer])
grads = T.grad(loss, params)
mgrads = lasagne.updates.total_norm_constraint(grads,
max_norm=max_norm)
cgrads = [T.clip(g, -clip_grad, clip_grad) for g in mgrads]
updates = lasagne.updates.adam(cgrads, params,
learning_rate=lr)
train = theano.function([input_var,target_var,dataset_size,lr],
loss,updates=updates)
predict = theano.function([input_var],y.argmax(1))
##################
# TRAIN
X, Y = train_x[:size],train_y[:size]
Xt, Yt = valid_x,valid_y
print 'trainset X.shape:{}, Y.shape:{}'.format(X.shape,Y.shape)
N = X.shape[0]
epochs = 50
records=list()
t = 0
for e in range(epochs):
if lrdecay:
lr = lr0 * 10**(-e/float(epochs-1))
else:
lr = lr0
for i in range(N/bs):
x = X[i*bs:(i+1)*bs]
y = Y[i*bs:(i+1)*bs]
loss = train(x,y,N,lr)
if t%100==0:
print 'epoch: {} {}, loss:{}'.format(e,t,loss)
tr_acc = (predict(X)==Y.argmax(1)).mean()
te_acc = (predict(Xt)==Yt.argmax(1)).mean()
print '\ttrain acc: {}'.format(tr_acc)
print '\ttest acc: {}'.format(te_acc)
t+=1
records.append(loss)
|
[
"[email protected]"
] | |
775a119a67245fdb0d9299d512d4b793d1281268
|
0f931d9e5b74f52a57499364d858819873bdf469
|
/15.py
|
ea1afc8f020b5301aa75fbcffe5bfc0a28df61c1
|
[] |
no_license
|
estuprofe/AdventOfCode2019
|
43f4d6f96d580a1732d7932ea863613af270fe56
|
54450df616feef810fbd410ccc9d1b0670195e49
|
refs/heads/master
| 2022-04-03T11:35:30.553698 | 2019-12-22T03:21:33 | 2019-12-22T03:21:33 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,313 |
py
|
import fileinput
import heapq
import intcode
left, right, opposite = [2, 3, 1, 0], [3, 2, 0, 1], [1, 0, 3, 2]
dxs, dys = [0, 0, -1, 1], [-1, 1, 0, 0]
def traverse(program):
buf = []
gen = intcode.run(program, buf)
send = lambda d: buf.append(d + 1) or next(gen)
test = lambda d: send(d) and send(opposite[d])
d, p, cells, oxygen = 0, (0, 0), set(), None
while True:
if test(left[d]):
d = left[d] # turn left if possible
elif not test(d):
d = right[d] # else turn right if can't go straight
s = send(d)
if s == 0:
continue
p = (p[0] + dxs[d], p[1] + dys[d])
cells.add(p)
if s == 2:
oxygen = p
if p == (0, 0):
return cells, oxygen
def shortest_path(cells, source, target):
seen, queue = set(), [(0, source)]
while queue:
d, p = heapq.heappop(queue)
if p == target:
return d
seen.add(p)
for dx, dy in zip(dxs, dys):
q = (p[0] + dx, p[1] + dy)
if q in cells and q not in seen:
heapq.heappush(queue, (d + 1, q))
cells, oxygen = traverse(list(fileinput.input())[0])
print(shortest_path(cells, (0, 0), oxygen))
print(max(shortest_path(cells, cell, oxygen) for cell in cells))
|
[
"[email protected]"
] | |
ef907923a1970b33a70abe7364cdcf42e701a3d2
|
3cea6c6664d9489b4cfb33ea8580f8189b5839ff
|
/torchex/nn/modules/padding.py
|
ca8bc82e42fac577d1304747aa66ed99bb511ce6
|
[
"MIT"
] |
permissive
|
tarokiritani/torchex
|
81c24457337bdbf6ad103dd9ded5488b69b468bd
|
5e9d8f7f08a3931c2271e108d73226b1ee6b3efa
|
refs/heads/master
| 2020-04-12T17:55:02.960878 | 2018-12-14T09:37:46 | 2018-12-14T09:37:46 | 162,661,997 | 0 | 0 | null | 2018-12-21T03:40:19 | 2018-12-21T03:40:19 | null |
UTF-8
|
Python
| false | false | 2,265 |
py
|
import torch
import torch.nn as nn
class PeriodicPad2d(nn.Module):
"""
:params torch.Tensor input: Input(B, C, W, H)
# https://github.com/ZichaoLong/aTEAM/blob/master/nn/functional/utils.py
"""
def __init__(self,
pad_left: int=0, pad_right: int=0,
pad_top: int=0, pad_bottom: int=0):
super(PeriodicPad2d, self).__init__()
self.__doc__ = 'hello'
self.pad_left = pad_left
self.pad_right = pad_right
self.pad_top = pad_top
self.pad_bottom = pad_bottom
def forward(self, input):
assert input.dim() == 4, 'only support Input(B, C, W, H) or Input(B, C, H, W)'
B, C, H, W = input.size()
left_pad = input[:, :, :, W-(self.pad_left):]
right_pad = input[:, :, :, :self.pad_right]
input = torch.cat([left_pad, input, right_pad], dim=3)
top_pad = input[:, :, H-(self.pad_top):, :]
bottom_pad = input[:, :, :self.pad_bottom, :]
input = torch.cat([top_pad, input, bottom_pad], dim=2)
return input
class PeriodicPad3d(nn.Module):
'''
Only support isotropic padding
'''
def __init__(self, pad: int=0):
super(PeriodicPad3d, self).__init__()
self.pad = pad
def forward(self, input):
'''
:params torch.Tensor input: Input(B, C, D, W, H)
'''
assert input.dim() == 5, 'only support Input(B, C, D, W, H)'
B, C, D, H, W = input.size()
pad_0 = input[:, :, D-(self.pad):, :, :]
pad_1 = input[:, :, :self.pad, :, :]
input = torch.cat([pad_0, input, pad_1], dim=2)
pad_0 = input[:, :, :, H-(self.pad):, :]
pad_1 = input[:, :, :, :self.pad, :]
input = torch.cat([pad_0, input, pad_1], dim=3)
pad_0 = input[:, :, :, :, W-(self.pad):]
pad_1 = input[:, :, :, :, :self.pad]
input = torch.cat([pad_0, input, pad_1], dim=4)
return input
if __name__ == '__main__':
x = torch.range(1, 25).view(1, 1, 5, 5)
print(x)
pad = PeriodicPad2d(2, 2, 2, 1)
print(pad(x))
print(pad(x).shape)
x = torch.range(1, 27).view(1, 1, 3, 3, 3)
pad = PeriodicPad3d(1)
print(pad(x))
|
[
"[email protected]"
] | |
f303c4c5c52b859986065ba36976c2cd24f5fa30
|
4e8e9ed2a8fb69ed8b46066a8d967e4c107013a4
|
/main/control/comment.py
|
74b22b2e72d524f3e59cb31990a4cf5d1b395682
|
[
"MIT"
] |
permissive
|
welovecoding/vote4code
|
a57b3d155096d362dca47587ad2985b4201ef036
|
be265d553af35dc6c5322ecb3f7d5b3cf7691b75
|
refs/heads/master
| 2021-08-11T22:46:40.884030 | 2019-11-15T16:15:05 | 2019-11-15T16:15:05 | 90,191,931 | 14 | 0 |
MIT
| 2021-08-10T22:50:49 | 2017-05-03T20:46:02 |
Python
|
UTF-8
|
Python
| false | false | 5,801 |
py
|
# coding: utf-8
from google.appengine.ext import ndb
import flask
import flask_wtf
import wtforms
import auth
import config
import model
import util
from main import app
###############################################################################
# Update
###############################################################################
class CommentUpdateForm(flask_wtf.FlaskForm):
content = wtforms.TextAreaField(
model.Comment.content._verbose_name,
[wtforms.validators.required()],
filters=[util.strip_filter],
)
post_key = wtforms.SelectField(
model.Comment.post_key._verbose_name,
[wtforms.validators.required()],
choices=[],
)
@app.route('/comment/create/', methods=['GET', 'POST'])
@app.route('/comment/<int:comment_id>/update/', methods=['GET', 'POST'])
@auth.login_required
def comment_update(comment_id=0):
if comment_id:
comment_db = model.Comment.get_by_id(comment_id)
else:
comment_db = model.Comment(user_key=auth.current_user_key())
if not comment_db or comment_db.user_key != auth.current_user_key():
flask.abort(404)
form = CommentUpdateForm(obj=comment_db)
user_dbs, user_cursor = model.User.get_dbs(limit=-1)
post_dbs, post_cursor = model.Post.get_dbs(limit=-1)
form.post_key.choices = [(c.key.urlsafe(), c.title) for c in post_dbs]
if flask.request.method == 'GET' and not form.errors:
form.post_key.data = comment_db.post_key.urlsafe() if comment_db.post_key else None
if form.validate_on_submit():
form.post_key.data = ndb.Key(urlsafe=form.post_key.data) if form.post_key.data else None
form.populate_obj(comment_db)
comment_db.put()
return flask.redirect(flask.url_for('comment_view', comment_id=comment_db.key.id()))
return flask.render_template(
'comment/comment_update.html',
title=comment_db.content if comment_id else 'New Comment',
html_class='comment-update',
form=form,
comment_db=comment_db,
)
###############################################################################
# List
###############################################################################
@app.route('/comment/')
def comment_list():
comment_dbs, comment_cursor = model.Comment.get_dbs()
return flask.render_template(
'comment/comment_list.html',
html_class='comment-list',
title='Comment List',
comment_dbs=comment_dbs,
next_url=util.generate_next_url(comment_cursor),
api_url=flask.url_for('api.comment.list'),
)
###############################################################################
# View
###############################################################################
@app.route('/comment/<int:comment_id>/')
def comment_view(comment_id):
comment_db = model.Comment.get_by_id(comment_id)
if not comment_db:
flask.abort(404)
return flask.render_template(
'comment/comment_view.html',
html_class='comment-view',
title=comment_db.content,
comment_db=comment_db,
api_url=flask.url_for('api.comment', comment_key=comment_db.key.urlsafe() if comment_db.key else ''),
)
###############################################################################
# Admin List
###############################################################################
@app.route('/admin/comment/')
@auth.admin_required
def admin_comment_list():
comment_dbs, comment_cursor = model.Comment.get_dbs(
order=util.param('order') or '-modified',
)
return flask.render_template(
'comment/admin_comment_list.html',
html_class='admin-comment-list',
title='Comment List',
comment_dbs=comment_dbs,
next_url=util.generate_next_url(comment_cursor),
api_url=flask.url_for('api.admin.comment.list'),
)
###############################################################################
# Admin Update
###############################################################################
class CommentUpdateAdminForm(CommentUpdateForm):
pass
@app.route('/admin/comment/create/', methods=['GET', 'POST'])
@app.route('/admin/comment/<int:comment_id>/update/', methods=['GET', 'POST'])
@auth.admin_required
def admin_comment_update(comment_id=0):
if comment_id:
comment_db = model.Comment.get_by_id(comment_id)
else:
comment_db = model.Comment(user_key=auth.current_user_key())
if not comment_db:
flask.abort(404)
form = CommentUpdateAdminForm(obj=comment_db)
user_dbs, user_cursor = model.User.get_dbs(limit=-1)
post_dbs, post_cursor = model.Post.get_dbs(limit=-1)
form.post_key.choices = [(c.key.urlsafe(), c.title) for c in post_dbs]
if flask.request.method == 'GET' and not form.errors:
form.post_key.data = comment_db.post_key.urlsafe() if comment_db.post_key else None
if form.validate_on_submit():
form.post_key.data = ndb.Key(urlsafe=form.post_key.data) if form.post_key.data else None
form.populate_obj(comment_db)
comment_db.put()
return flask.redirect(flask.url_for('admin_comment_list', order='-modified'))
return flask.render_template(
'comment/admin_comment_update.html',
title=comment_db.content,
html_class='admin-comment-update',
form=form,
comment_db=comment_db,
back_url_for='admin_comment_list',
api_url=flask.url_for('api.admin.comment', comment_key=comment_db.key.urlsafe() if comment_db.key else ''),
)
###############################################################################
# Admin Delete
###############################################################################
@app.route('/admin/comment/<int:comment_id>/delete/', methods=['POST'])
@auth.admin_required
def admin_comment_delete(comment_id):
comment_db = model.Comment.get_by_id(comment_id)
comment_db.key.delete()
flask.flash('Comment deleted.', category='success')
return flask.redirect(flask.url_for('admin_comment_list'))
|
[
"[email protected]"
] | |
224115799dcddd421f082f520cd9f670ef3cd9cc
|
e81fabdd6988c787524755fac73aa9d3631fc64c
|
/tests/test_ops/test_early_stopping.py
|
286560c5fd38fb4cc2edbac48b85b01eeecdd9e7
|
[
"MIT"
] |
permissive
|
granularai/polyaxon-schemas
|
0aa06f15b7353ceb6d31f1e5cf63c269ab0e2ce4
|
017ae74701f21f12f0b25e75379681ea5d8baa9e
|
refs/heads/master
| 2022-08-30T00:05:40.888476 | 2020-05-19T17:22:46 | 2020-05-19T17:22:46 | 265,312,701 | 0 | 0 |
MIT
| 2020-05-19T17:16:38 | 2020-05-19T17:16:37 | null |
UTF-8
|
Python
| false | false | 1,874 |
py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from unittest import TestCase
from tests.utils import assert_equal_dict
from polyaxon_schemas.ops.group.early_stopping_policies import EarlyStoppingConfig
from polyaxon_schemas.ops.group.metrics import Optimization
class TestEarlyStoppingConfigs(TestCase):
def test_early_stopping(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
}
config = EarlyStoppingConfig.from_dict(config_dict)
config_to_dict = config.to_dict()
assert config_to_dict.pop('optimization') == Optimization.MAXIMIZE
assert_equal_dict(config_to_dict, config_dict)
def test_early_stopping_with_median_policy(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
'optimization': Optimization.MINIMIZE,
'policy': {'kind': 'median', 'evaluation_interval': 1}
}
config = EarlyStoppingConfig.from_dict(config_dict)
assert_equal_dict(config.to_dict(), config_dict)
def test_early_stopping_with_average_policy(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
'optimization': Optimization.MINIMIZE,
'policy': {'kind': 'average', 'evaluation_interval': 1}
}
config = EarlyStoppingConfig.from_dict(config_dict)
assert_equal_dict(config.to_dict(), config_dict)
def test_early_stopping_with_truncation_policy(self):
config_dict = {
'metric': 'loss',
'value': 0.1,
'optimization': Optimization.MAXIMIZE,
'policy': {'kind': 'truncation', 'percent': 50, 'evaluation_interval': 1}
}
config = EarlyStoppingConfig.from_dict(config_dict)
assert_equal_dict(config.to_dict(), config_dict)
|
[
"[email protected]"
] | |
9ff9b1b4f5e88031f1b4c71bf900b366103e5a6f
|
b67efb7ac1832f2a70aa570f8025c69498a8cd71
|
/pgoapi/protos/POGOProtos/Data/Logs/FortSearchLogEntry_pb2.py
|
7469fad7bf20a643ec48fffd8c8889493a9bf5e5
|
[
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] |
permissive
|
PogoHop/pgoapi-hsvr
|
f1513d7548075a7defd21f1018bd59afcb79d78f
|
b5761159e0240bbb81ef6c257fe2eb1bc1ce2d47
|
refs/heads/master
| 2021-01-12T11:17:55.334203 | 2016-11-05T12:48:38 | 2016-11-05T12:48:38 | 72,892,081 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | true | 4,709 |
py
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Data/Logs/FortSearchLogEntry.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Inventory.Item import ItemData_pb2 as POGOProtos_dot_Inventory_dot_Item_dot_ItemData__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Data/Logs/FortSearchLogEntry.proto',
package='POGOProtos.Data.Logs',
syntax='proto3',
serialized_pb=_b('\n-POGOProtos/Data/Logs/FortSearchLogEntry.proto\x12\x14POGOProtos.Data.Logs\x1a(POGOProtos/Inventory/Item/ItemData.proto\"\xca\x01\n\x12\x46ortSearchLogEntry\x12?\n\x06result\x18\x01 \x01(\x0e\x32/.POGOProtos.Data.Logs.FortSearchLogEntry.Result\x12\x0f\n\x07\x66ort_id\x18\x02 \x01(\t\x12\x32\n\x05items\x18\x03 \x03(\x0b\x32#.POGOProtos.Inventory.Item.ItemData\x12\x0c\n\x04\x65ggs\x18\x04 \x01(\x05\" \n\x06Result\x12\t\n\x05UNSET\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x62\x06proto3')
,
dependencies=[POGOProtos_dot_Inventory_dot_Item_dot_ItemData__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_FORTSEARCHLOGENTRY_RESULT = _descriptor.EnumDescriptor(
name='Result',
full_name='POGOProtos.Data.Logs.FortSearchLogEntry.Result',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSET', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUCCESS', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=284,
serialized_end=316,
)
_sym_db.RegisterEnumDescriptor(_FORTSEARCHLOGENTRY_RESULT)
_FORTSEARCHLOGENTRY = _descriptor.Descriptor(
name='FortSearchLogEntry',
full_name='POGOProtos.Data.Logs.FortSearchLogEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.result', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fort_id', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.fort_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='items', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.items', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eggs', full_name='POGOProtos.Data.Logs.FortSearchLogEntry.eggs', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_FORTSEARCHLOGENTRY_RESULT,
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=114,
serialized_end=316,
)
_FORTSEARCHLOGENTRY.fields_by_name['result'].enum_type = _FORTSEARCHLOGENTRY_RESULT
_FORTSEARCHLOGENTRY.fields_by_name['items'].message_type = POGOProtos_dot_Inventory_dot_Item_dot_ItemData__pb2._ITEMDATA
_FORTSEARCHLOGENTRY_RESULT.containing_type = _FORTSEARCHLOGENTRY
DESCRIPTOR.message_types_by_name['FortSearchLogEntry'] = _FORTSEARCHLOGENTRY
FortSearchLogEntry = _reflection.GeneratedProtocolMessageType('FortSearchLogEntry', (_message.Message,), dict(
DESCRIPTOR = _FORTSEARCHLOGENTRY,
__module__ = 'POGOProtos.Data.Logs.FortSearchLogEntry_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Data.Logs.FortSearchLogEntry)
))
_sym_db.RegisterMessage(FortSearchLogEntry)
# @@protoc_insertion_point(module_scope)
|
[
"[email protected]"
] | |
67dcd3ec7cdb0cc71d9f3b762d542f02506fbeb3
|
49ba5356bdc5df7dd9803b56fe507c5164a90716
|
/surface-area-of-3d-shapes/solution.py
|
a1de598aa85c92a605d01dfaf2403263d9ecf1e5
|
[] |
no_license
|
uxlsl/leetcode_practice
|
d80ad481c9d8ee71cce0f3c66e98446ced149635
|
d8ed762d1005975f0de4f07760c9671195621c88
|
refs/heads/master
| 2021-04-25T18:12:28.136504 | 2020-03-11T07:54:15 | 2020-03-11T07:54:15 | 121,472,384 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,509 |
py
|
# leetcode
# https://leetcode-cn.com/problems/surface-area-of-3d-shapes/
# 解法:
# 求六个方向的表面积
class Solution(object):
def surfaceArea(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
N = len(grid)
area = 0
# xy 正反面
for i in range(N):
h = 0
v = 0
for j in range(N):
if grid[i][j] > 0:
area += 2
if grid[i][j] > h:
h = grid[i][j]
if grid[j][i] > v:
v = grid[j][i]
if j > 0 and j+1 < N and grid[i][j-1] > grid[i][j] < grid[i][j+1]:
area +=2*(grid[i][j-1] - grid[i][j])
if i > 0 and i+1 < N and grid[i-1][j] > grid[i][j] < grid[i+1][j]:
area +=2*(grid[i-1][j] - grid[i][j])
area += 2*v
area += 2*h
return area
class Solution(object):
def surfaceArea(self, grid):
N = len(grid)
ans = 0
for r in xrange(N):
for c in xrange(N):
if grid[r][c]:
ans += 2
for nr, nc in ((r-1, c), (r+1, c), (r, c-1), (r,c+1)):
if 0 <= nr < N and 0 <= nc < N:
nval = grid[nr][nc]
else:
nval = 0
ans += max(grid[r][c] - nval, 0)
return ans
|
[
"[email protected]"
] | |
ce978302f88b0065282a8fb57be6ec347d9e2012
|
2fabea234735beefc980b77b213fcb0dfb394980
|
/tensorflow_probability/python/distributions/deprecated_linalg_test.py
|
e30bf6de1138043acd8d2544bd85b4b5b72eabca
|
[
"Apache-2.0"
] |
permissive
|
tarrou/probability
|
0eee452b525a6e6b3c7c98d467468e47f07e861b
|
d4d80a1c04ad0b3e98758ebc3f7f82887274384d
|
refs/heads/master
| 2020-08-08T11:16:42.441268 | 2019-12-06T17:35:17 | 2019-12-06T17:35:17 | 213,819,828 | 0 | 0 |
Apache-2.0
| 2019-10-09T04:20:19 | 2019-10-09T04:20:19 | null |
UTF-8
|
Python
| false | false | 2,518 |
py
|
# Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for deprecated_linalg functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions.deprecated_linalg import tridiag
from tensorflow_probability.python.internal import test_case
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import
@test_util.run_all_in_graph_and_eager_modes
class TridiagTest(test_case.TestCase):
def testWorksCorrectlyNoBatches(self):
self.assertAllEqual(
[[4., 8., 0., 0.],
[1., 5., 9., 0.],
[0., 2., 6., 10.],
[0., 0., 3, 7.]],
self.evaluate(tridiag(
[1., 2., 3.],
[4., 5., 6., 7.],
[8., 9., 10.])))
def testWorksCorrectlyBatches(self):
self.assertAllClose(
[[[4., 8., 0., 0.],
[1., 5., 9., 0.],
[0., 2., 6., 10.],
[0., 0., 3, 7.]],
[[0.7, 0.1, 0.0, 0.0],
[0.8, 0.6, 0.2, 0.0],
[0.0, 0.9, 0.5, 0.3],
[0.0, 0.0, 1.0, 0.4]]],
self.evaluate(tridiag(
[[1., 2., 3.],
[0.8, 0.9, 1.]],
[[4., 5., 6., 7.],
[0.7, 0.6, 0.5, 0.4]],
[[8., 9., 10.],
[0.1, 0.2, 0.3]])),
rtol=1e-5, atol=0.)
def testHandlesNone(self):
self.assertAllClose(
[[[4., 0., 0., 0.],
[0., 5., 0., 0.],
[0., 0., 6., 0.],
[0., 0., 0, 7.]],
[[0.7, 0.0, 0.0, 0.0],
[0.0, 0.6, 0.0, 0.0],
[0.0, 0.0, 0.5, 0.0],
[0.0, 0.0, 0.0, 0.4]]],
self.evaluate(tridiag(
diag=[[4., 5., 6., 7.],
[0.7, 0.6, 0.5, 0.4]])),
rtol=1e-5, atol=0.)
if __name__ == '__main__':
tf.test.main()
|
[
"[email protected]"
] | |
8c0d012d8d04a4973b14979e0731ec72a32bbdde
|
0728a2e165808cfe5651693a6e7f47804bfb085f
|
/ry/trunk-ry/rynok/controllers/category.py
|
2c8663f5cbf391dbaad2d949ff7d5a5f07a4cd0e
|
[] |
no_license
|
testTemtProj/OLD_PROJECT
|
5b026e072017f5135159b0940370fda860241d39
|
9e5b165f4e8acf9003536e05dcefd33a5ae46890
|
refs/heads/master
| 2020-05-18T15:30:24.543319 | 2013-07-23T15:17:32 | 2013-07-23T15:17:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,907 |
py
|
#coding: utf-8
""" Category Controller
"""
import logging
import rynok.lib.helpers as h
import json
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from webhelpers.html.builder import HTML
from rynok.lib.base import BaseController, render
from rynok.model.categoriesModel import CategoriesModel
from rynok.lib import helpers as h
from rynok.model.referenceModel import ReferenceModel
from rynok.model.settingsModel import SettingsModel
LOG = logging.getLogger(__name__)
class CategoryController(BaseController):
def __init__(self):
BaseController.__init__(self)
self.categories_model = CategoriesModel
def index(self, url):
category = self.categories_model.getByURL(url=url)
if not category:
return render('/error/error.mako.html')
if 'isLeaf' in category and category['isLeaf']:
return self.view(category=category)
cats = self.categories_model.getChildrens(category["ID"], non_empty=True)
c.cats = []
for cat in cats:
c.cats.append(cat)
c.category = category
return render('/category.mako.html')
def all(self):
cats = self.categories_model.getChildrens(categoryId=0, non_empty=True)
c.cats = []
for cat in cats:
c.cats.append(cat)
return render('/all.categories.mako.html')
def popular(self):
reference_model = ReferenceModel
settings = SettingsModel.get_popular_block_settings()
c.title = 'Популярные товары'
c.products = reference_model.get_popular_products(settings['categories'], settings['per_category'])
return render('/products.html')
def new(self):
reference_model = ReferenceModel
c.title = 'Новые товары'
c.products = reference_model.get_new_products(28)
return render('/products.html')
def view(self, category, page=1):
reference_model = ReferenceModel
if not isinstance(category, dict):
category = self.categories_model.getByURL(category)
c.category = category
c.error_message = None
sort_by = request.params.get('sort_by', 'price')
if sort_by == 'rating':
by = 'Rate'
elif sort_by == 'price':
by = 'price'
elif sort_by == 'popular':
by = 'popular'
try:
c.markets = json.loads(request.params.get('m_id', '[]'))
except ValueError:
c.markets = []
try:
c.vendors = json.loads(request.params.get('v_id', '[]'))
except ValueError:
c.vendors = []
sort_order = request.params.get('sort_order', 'desc')
try:
c.price_min = int(request.params.get('price_min', 0))
except:
c.price_min = 0
try:
c.perPage = int(request.params.get('per_page', 10))
except:
c.perPage = 10
c.currency = request.params.get('currency', 'UAH')
query = {'categoryId':int(category['ID']), c.currency: {'$gt': c.price_min-1}}
c.affordable_price = reference_model.get_max_price(query, c.currency) + 1
c.price_max = int(request.params.get('price_max', c.affordable_price))
query[c.currency]['$lt'] = c.price_max + 1
if len(c.markets) > 0 and len(c.vendors) > 0:
query['shopId'] = {'$in':c.markets}
query['vendor'] = {'$in':c.vendors}
if len(c.markets) > 0 and len(c.vendors) == 0:
query['shopId'] = {'$in':c.markets}
if len(c.markets) == 0 and len(c.vendors) > 0:
query['vendor'] = {'$in':c.vendors}
count_products = reference_model.get_count(query=query)
"""
if count_products == 0:
referer = request.headers.get('Referer', '')
http_host = request.environ.get('HTTP_HOST')
c.back_url = referer
if referer.find(http_host) == -1:
c.back_url = '/'
cats = self.categories_model.getChildrens(categoryId=0, non_empty=True)
c.cats = []
for cat in cats:
c.cats.append(cat)
c.noresult = u"По даной цене товары не найдены"
return render('/empty.category.mako.html')
"""
if count_products > 0:
c.products = reference_model.get_reference(where=query, perPage = c.perPage, page = int(page)-1, by=by, direction=sort_order)
else:
#get_less_products_query = query.copy()
#get_less_products_query[c.currency] = {'$lt' : c.price_min}
get_more_products_query = query.copy()
del(get_more_products_query[c.currency])# = {'$lte' : c.price_max}
#less_products = reference_model.get_reference(where=get_less_products_query, limit=2, by=c.currency, direction=-1)
#more_products = reference_model.get_reference(where=get_more_products_query, limit=2, by=c.currency, direction=1)
#c.products = more_products
print get_more_products_query
c.products = reference_model.get_reference(where=get_more_products_query, perPage = c.perPage, page = int(page)-1, by=by, direction=sort_order)
c.error_message = u"По даной цене товары не найдены, показаны без учета цены"
count_products = reference_model.get_count(query=get_more_products_query)
c.page = page
c.total_pages = count_products/c.perPage
if count_products%c.perPage:
c.total_pages += 1
c.sort_settings = {sort_by: sort_order}
c.current_url = category['URL']+'/'+str(page)
return render('/view.category.mako.html')
|
[
"[email protected]"
] | |
36686ecf3ef8dddacb386186f976e7db325b7da8
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/16/usersdata/122/6123/submittedfiles/triangulo.py
|
3ae57cd9af2aa0c78741ee0de80b08dafd3b0c19
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 369 |
py
|
# -*- coding: utf-8 -*-
from __future__ import division
import math
#ENTRADA
a=input('digite o valor do lado a:')
b=input('digite o valor do lado b:')
c=input('digite o valor do lado c:')
if a<(b+c):
print('S')
if (a**2)==(b**2)+(c**2):
print('Re')
elif (a**2)>(b**2)+(c**2):
print('Ob')
elif (a**2)<(b**2)+(c**2):
print('Ac')
|
[
"[email protected]"
] | |
9cc28d9f4c07c4648dc57207f4e8201627ae8aed
|
1b9075ffea7d4b846d42981b41be44238c371202
|
/2008/devel/programming/libs/libdbf/actions.py
|
242da707fa8723753f2298926612cdf827675c4e
|
[] |
no_license
|
pars-linux/contrib
|
bf630d4be77f4e484b8c6c8b0698a5b34b3371f4
|
908210110796ef9461a1f9b080b6171fa022e56a
|
refs/heads/master
| 2020-05-26T20:35:58.697670 | 2011-07-11T11:16:38 | 2011-07-11T11:16:38 | 82,484,996 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 829 |
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import get
WorkDir = "libdbf"
def setup():
shelltools.chmod("configure")
shelltools.chmod("install-sh")
pisitools.dosed("configure","docbook-to-man","docbook2man")
autotools.rawConfigure("--prefix=/usr \
--disable-static")
def build():
autotools.make()
def install():
autotools.install()
pisitools.domo("po/tr.po", "tr", "libdbf.mo")
pisitools.insinto("/usr/share/doc/%s" % get.srcTAG(),"man/html")
pisitools.dodoc("ChangeLog", "COPYING", "README")
|
[
"MeW@a748b760-f2fe-475f-8849-a8a11d7a3cd2"
] |
MeW@a748b760-f2fe-475f-8849-a8a11d7a3cd2
|
cdbe41c2ec761eb560f3450e4eafcb73d802900a
|
cecd61903943d9f25f37605a344b1683ee958b11
|
/what_is_food/config/desktop.py
|
06f7e215806a55310c988083ea653bb469f998b8
|
[
"MIT"
] |
permissive
|
ashish-greycube/what_is_food
|
1f31ce461f97f8d4dccbbd078eb2190a4f785caa
|
6c4a327a721accf86667cc87c0b2976dcd09abe6
|
refs/heads/master
| 2022-12-15T06:07:07.137326 | 2020-08-27T04:05:21 | 2020-08-27T04:05:21 | 273,717,466 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 274 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"module_name": "What Is Food",
"color": "yellow",
"icon": "octicon octicon-device-mobile",
"type": "module",
"label": _("What Is Food")
}
]
|
[
"[email protected]"
] | |
40bf69fc32a19fddc23cf0e29fdc8fc40c238709
|
8ef8e6818c977c26d937d09b46be0d748022ea09
|
/nlp/dialogue_generation/cpm/pytorch/iluvatar/cpm/config/layers/self_multihead_attn.py
|
55be679404c3ac3d70ad62e15e3d9ac7aa90f005
|
[
"Apache-2.0"
] |
permissive
|
Deep-Spark/DeepSparkHub
|
eb5996607e63ccd2c706789f64b3cc0070e7f8ef
|
9d643e88946fc4a24f2d4d073c08b05ea693f4c5
|
refs/heads/master
| 2023-09-01T11:26:49.648759 | 2023-08-25T01:50:18 | 2023-08-25T01:50:18 | 534,133,249 | 7 | 6 |
Apache-2.0
| 2023-03-28T02:54:59 | 2022-09-08T09:07:01 |
Python
|
UTF-8
|
Python
| false | false | 3,926 |
py
|
import math
import torch
from torch import nn
from torch.nn import Parameter
import torch.nn.functional as F
from layers.self_multihead_attn_func import self_attn_func
from layers.fast_self_multihead_attn_func import fast_self_attn_func
from apex.normalization.fused_layer_norm import FusedLayerNorm
class SelfMultiheadAttn(nn.Module):
    """Multi-headed attention.
    See "Attention Is All You Need" for more details.
    """
    def __init__(self, embed_dim, num_heads, dropout=0., bias=False, impl='fast'):
        # embed_dim: total model width; num_heads: number of attention heads;
        # dropout: attention-dropout probability; impl: 'fast' (fused apex
        # kernel) or 'default' (reference implementation) -- anything else
        # trips the assert below.
        super().__init__()
        self.embed_dim = embed_dim
        self.num_heads = num_heads
        self.dropout = dropout
        self.head_dim = embed_dim // num_heads
        assert self.head_dim * \
            num_heads == self.embed_dim, "embed_dim must be divisible by num_heads"
        # NOTE(review): `bias` is stored but the q/k/v/out bias Parameters are
        # created unconditionally below -- confirm whether bias=False was
        # intended to skip their creation.
        self.bias = bias
        self.impl = impl
        # 1/sqrt(head_dim); only the 'default' path receives this explicitly.
        self.scaling = self.head_dim**-0.5
        self.q_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
        self.k_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
        self.v_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
        self.out_proj_weight = Parameter(torch.Tensor(embed_dim, embed_dim))
        self.q_bias = Parameter(torch.Tensor(embed_dim))
        self.k_bias = Parameter(torch.Tensor(embed_dim))
        self.v_bias = Parameter(torch.Tensor(embed_dim))
        self.out_proj_bias = Parameter(torch.Tensor(embed_dim))
        self.reset_parameters()
        # Bind the chosen attention implementation once, at construction time.
        if impl == 'fast':
            self.attn_func = fast_self_attn_func
        elif impl == 'default':
            self.attn_func = self_attn_func
        else:
            assert False, "Unsupported impl: {} !".format(impl)
    def reset_parameters(self):
        # Xavier-uniform projection weights, zero biases.
        nn.init.xavier_uniform_(self.q_weight)
        nn.init.xavier_uniform_(self.k_weight)
        nn.init.xavier_uniform_(self.v_weight)
        nn.init.xavier_uniform_(self.out_proj_weight)
        nn.init.constant_(self.q_bias, 0.)
        nn.init.constant_(self.k_bias, 0.)
        nn.init.constant_(self.v_bias, 0.)
        nn.init.constant_(self.out_proj_bias, 0.)
    def forward(self, query, attn_mask=None, is_training=True):
        """Apply self-attention to `query`.

        Input shape: Time x Batch x Channel.

        :param query: input sequence; serves as query, key and value.
        :param attn_mask: optional mask forwarded unchanged to the attention
            function (its presence also sets the use-mask flag).
        :param is_training: enables attention dropout when True.
        :return: output of ``self.attn_func``.
        """
        mask = attn_mask
        # Interleave q/k/v per head into one (3*embed_dim, embed_dim) matrix
        # so a single GEMM can produce all three projections.
        input_weights = torch.cat([self.q_weight.view(self.num_heads, 1, self.head_dim, self.embed_dim), self.k_weight.view(self.num_heads, 1, self.head_dim,
                                                                                                                            self.embed_dim), self.v_weight.view(self.num_heads, 1, self.head_dim, self.embed_dim)], dim=1).reshape(3*self.embed_dim, self.embed_dim).contiguous()
        # Same per-head interleaving for the biases -> (3*embed_dim,).
        input_bias = torch.cat([self.q_bias.view(self.num_heads, 1, self.head_dim), self.k_bias.view(
            self.num_heads, 1, self.head_dim), self.v_bias.view(self.num_heads, 1, self.head_dim)], dim=1).reshape(3*self.embed_dim).contiguous()
        if self.impl == 'fast':
            outputs = self.attn_func(attn_mask is not None, is_training, self.num_heads, query,
                                     input_weights, self.out_proj_weight, input_bias, self.out_proj_bias, mask, False, self.dropout)
        else:
            # Reference path additionally takes the explicit scaling factor.
            outputs = self.attn_func(attn_mask is not None, is_training, self.num_heads, self.scaling, query,
                                     input_weights, self.out_proj_weight,
                                     input_bias, self.out_proj_bias,
                                     mask, False, self.dropout)
        return outputs
|
[
"[email protected]"
] | |
33c8c7e6cc382a9dbcd9a3eb49171fbcf67e4e72
|
bedf68a6e2bb337d2848a4a55a24c71fd62484c7
|
/tests/test_NMT_architectures/bidir_deep_LSTM_ConditionalGRU.py
|
0151b7f437481dcaae0d6cdc7546fecde3951030
|
[
"MIT"
] |
permissive
|
19ai/nmt-keras
|
941d5bbffe1889d72e4d58ae77fd92d8db3b0df7
|
ec56acb619b0c4be0558f737d5d848971fa282db
|
refs/heads/master
| 2020-03-11T16:37:06.633273 | 2018-04-18T11:46:03 | 2018-04-18T11:46:03 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,746 |
py
|
import argparse
import pytest
from keras import backend as K
from config import load_parameters
from data_engine.prepare_data import build_dataset
from main import train_model, apply_NMT_model
from sample_ensemble import sample_ensemble
from score import score_corpus
def load_tests_params():
    """Return the project parameters shrunk down for fast unit tests.

    Starts from ``config.load_parameters()`` and overrides sizes, epochs
    and regularisation so a full train/sample/score cycle finishes quickly.
    """
    params = load_parameters()
    overrides = {
        'BATCH_SIZE': 10,
        'WEIGHT_DECAY': 1e-4,
        'RECURRENT_WEIGHT_DECAY': 1e-4,
        'DROPOUT_P': 0.01,
        'RECURRENT_INPUT_DROPOUT_P': 0.01,
        'RECURRENT_DROPOUT_P': 0.01,
        'USE_NOISE': True,
        'NOISE_AMOUNT': 0.01,
        'USE_BATCH_NORMALIZATION': True,
        'BATCH_NORMALIZATION_MODE': 1,
        'SOURCE_TEXT_EMBEDDING_SIZE': 8,
        'TARGET_TEXT_EMBEDDING_SIZE': 8,
        'DECODER_HIDDEN_SIZE': 4,
        'ENCODER_HIDDEN_SIZE': 4,
        'DOUBLE_STOCHASTIC_ATTENTION_REG': 0.7,
        'RELOAD': 0,
        'MAX_EPOCH': 2,
    }
    params.update(overrides)
    # These two track the (tiny) decoder size chosen above.
    params['ATTENTION_SIZE'] = params['DECODER_HIDDEN_SIZE']
    params['SKIP_VECTORS_HIDDEN_SIZE'] = params['DECODER_HIDDEN_SIZE']
    return params
def test_NMT_Bidir_deep_LSTM_ConditionalGRU():
    """End-to-end smoke test: bidirectional 2-layer LSTM encoder with a
    2-layer ConditionalGRU decoder -- builds the dataset, trains, reloads,
    applies the model, then samples and scores with an ensemble of one.
    """
    params = load_tests_params()
    # Current test params: Two-layered LSTM - ConditionalGRU
    params['BIDIRECTIONAL_ENCODER'] = True
    params['N_LAYERS_ENCODER'] = 2
    params['BIDIRECTIONAL_DEEP_ENCODER'] = False
    params['ENCODER_RNN_TYPE'] = 'LSTM'
    params['DECODER_RNN_TYPE'] = 'ConditionalGRU'
    params['N_LAYERS_DECODER'] = 2
    params['REBUILD_DATASET'] = True
    dataset = build_dataset(params)
    # Vocabulary sizes come from the freshly built dataset.
    params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
    params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
    # Encode the whole architecture into the model/store name so artifacts
    # from different configurations never collide on disk.
    params['MODEL_NAME'] = \
        params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
        '_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
        '_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
        '_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
            params['ENCODER_HIDDEN_SIZE']) + \
        '_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
            params['DECODER_HIDDEN_SIZE']) + \
        '_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
        '_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
        '_' + params['OPTIMIZER'] + '_' + str(params['LR'])
    params['STORE_PATH'] = K.backend() + '_test_train_models/' + params['MODEL_NAME'] + '/'
    # Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
    train_model(params)
    params['RELOAD'] = 2
    apply_NMT_model(params)
    # NOTE(review): attributes are set directly on the ArgumentParser object
    # (no parse_args) to fake a parsed-arguments namespace for the utilities.
    parser = argparse.ArgumentParser('Parser for unit testing')
    parser.dataset = params['DATASET_STORE_PATH'] + '/Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl'
    parser.text = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
    parser.splits = ['val']
    parser.config = params['STORE_PATH'] + '/config.pkl'
    parser.models = [params['STORE_PATH'] + '/epoch_' + str(2)]
    parser.verbose = 0
    parser.dest = None
    parser.source = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['SRC_LAN']
    parser.target = params['DATA_ROOT_PATH'] + '/' + params['TEXT_FILES']['val'] + params['TRG_LAN']
    parser.weights = []
    # Exercise both n-best and single-best sampling/scoring paths.
    for n_best in [True, False]:
        parser.n_best = n_best
        sample_ensemble(parser, params)
        score_corpus(parser, params)
# Allow running this test module directly (outside a pytest invocation).
if __name__ == '__main__':
    pytest.main([__file__])
|
[
"[email protected]"
] | |
fe8974fa7e751cfea487290d10694d7ad661d211
|
491f29501fa7d484a5860f64aef3fa89fb18ca3d
|
/examples/mechanics/GeometricPrimitives/disk_on_box.py
|
275e8e9cb3d623f8b232906ba95792f7316f040e
|
[
"Apache-2.0"
] |
permissive
|
siconos/siconos-tutorials
|
e7e6ffbaaea49add49eddd317c46760393e3ef9a
|
0472c74e27090c76361d0b59283625ea88f80f4b
|
refs/heads/master
| 2023-06-10T16:43:13.060120 | 2023-06-01T07:21:25 | 2023-06-01T07:21:25 | 152,255,663 | 7 | 2 |
Apache-2.0
| 2021-04-08T12:00:39 | 2018-10-09T13:26:39 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 4,596 |
py
|
#!/usr/bin/env python
#
# Example of one object under gravity with one contactor and a ground
# using the Siconos proposed mechanics API
#
# Scene: three 2-D disks dropped/thrown onto a static box "ground"; the
# scene is written to an HDF5 file, then simulated with the Bullet 2-D
# broadphase and the NSGS frictional-contact solver.
from siconos.mechanics.collision.tools import Contactor
from siconos.io.mechanics_run import MechanicsHdf5Runner, MechanicsHdf5Runner_run_options
from siconos.mechanics.collision.bullet import SiconosBulletOptions, SICONOS_BULLET_2D
import siconos.numerics as sn
import siconos.kernel as sk
import math
# When True, skip scene creation and re-run from the existing HDF5 file.
restart=False
if not restart:
    # Creation of the hdf5 file for input/output
    with MechanicsHdf5Runner() as io:
        # Definition of a sphere
        io.add_primitive_shape('Disk', 'Disk', (2,),
                               insideMargin=0.0, outsideMargin=0.0)
        # Definition of the ground shape
        io.add_primitive_shape('Ground', 'Box2d', (20, 1),
                               insideMargin=0.0, outsideMargin=0.0)
        # Definition of a non smooth law. As no group ids are specified it
        # is between contactors of group id 0.
        io.add_Newton_impact_friction_nsl('contact', mu=0.1, e=0.5)
        # The sphere object made with an unique Contactor : the sphere shape.
        # As a mass is given, it is a dynamic system involved in contact
        # detection and in the simulation. With no group id specified the
        # Contactor belongs to group 0
        io.add_object('disk', [Contactor('Disk')],
                      translation=[-1, 2.],
                      orientation = [math.pi/4.0],
                      velocity=[0, 0, 0.0],
                      mass=1., inertia =2.0)
        # io.add_object('disk2', [Contactor('Disk')],
        #               translation=[0, 6.],
        #               velocity=[0, 0, -10.0],
        #               mass=1., inertia =2.0)
        io.add_object('disk2', [Contactor('Disk')],
                      translation=[4*math.sqrt(2)/2., 2+4*math.sqrt(2)/2.],
                      orientation = [math.pi/4.0],
                      velocity=[0, 0, 0.0],
                      mass=1., inertia =2.0)
        io.add_object('disk3', [Contactor('Disk')],
                      translation=[4*math.sqrt(2), 2.],
                      orientation = [math.pi/4.0],
                      velocity=[0, 0, -1.0],
                      mass=1., inertia =2.0)
        # the ground object made with the ground shape. As the mass is
        # not given, it is a static object only involved in contact
        # detection.
        io.add_object('ground', [Contactor('Ground')],
                      translation=[0, -.5])
# Run the simulation from the inputs previously defined and add
# results to the hdf5 file. The visualisation of the output may be done
# with the vview command.
bullet_options = SiconosBulletOptions()
bullet_options.worldScale = 1.0
bullet_options.contactBreakingThreshold = 0.04
bullet_options.dimension = SICONOS_BULLET_2D
bullet_options.perturbationIterations = 0
bullet_options.minimumPointsPerturbationThreshold = 0
# Frictional-contact solver: NSGS with a high iteration cap and tight tolerance.
options = sk.solver_options_create(sn.SICONOS_FRICTION_2D_NSGS)
options.iparam[sn.SICONOS_IPARAM_MAX_ITER] = 100000
options.dparam[sn.SICONOS_DPARAM_TOL] = 1e-8
T=2.0
if restart:
    T=2.0
    #T=1*0.001
hstep=0.01
# Assemble all run options (time grid, solver, OSI, verbosity, debugging).
run_options=MechanicsHdf5Runner_run_options()
run_options['t0']=0
run_options['T']=T
run_options['h']=hstep
run_options['bullet_options']=bullet_options
run_options['solver_options']=options
run_options['constraint_activation_threshold']=1e-05
run_options['Newton_options']=sk.SICONOS_TS_LINEAR
run_options['osns_assembly_type']= sk.GLOBAL_REDUCED
run_options['osi']= sk.MoreauJeanGOSI
run_options['verbose']=True
run_options['with_timer']=True
run_options['explode_Newton_solve']=True
run_options['explode_computeOneStep']=True
#run_options['output_frequency']=output_frequency
# Re-open the HDF5 file in read/write mode and run the simulation.
with MechanicsHdf5Runner(mode='r+') as io:
    # By default earth gravity is applied and the units are those
    # of the International System of Units.
    # io.run(verbose=True,
    #        with_timer=False,
    #        bullet_options=bullet_options,
    #        face_class=None,
    #        edge_class=None,
    #        t0=0,
    #        T=T,
    #        h=0.001,
    #        theta=0.50001,
    #        Newton_max_iter=1,
    #        set_external_forces=None,
    #        solver_options=options,
    #        numerics_verbose=True,
    #        output_frequency=None,
    #        Newton_options= sk.SICONOS_TS_LINEAR_IMPLICIT,
    #        constraint_activation_threshold=1e-5,
    #        osi=sk.MoreauJeanGOSI,
    #        osns_assembly_type= sk.GLOBAL_REDUCED
    #        )
    io.run(run_options)
|
[
"[email protected]"
] | |
128763e94d58774059e8218f401b3f0fd84cad73
|
bc9ebb347af6804c1bce6e960148ece1fbb34a47
|
/1_python/bronze/10870.py
|
d22572f825e791fd8b0ffbb53df9d5bdaad4045a
|
[] |
no_license
|
mooncs/BOJ
|
48416fec1a059197a72de61c8d6e72f7fc8b542b
|
45d2d5a8a6bf0f10d026f3846b70009914aa90d3
|
refs/heads/main
| 2023-08-13T15:30:47.395359 | 2021-10-11T13:37:04 | 2021-10-11T13:37:04 | 392,885,432 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 277 |
py
|
# Fibonacci number 5 (translated: "피보나치 수 5")
def fibo(x):
    """Return the x-th Fibonacci number (fibo(0) == 0, fibo(1) == 1).

    Iterative O(x) rewrite of the naive double recursion, which is
    exponential in x. For x <= 1 (including negative inputs) x itself
    is returned, exactly matching the recursive base case.
    """
    if x <= 1:
        return x
    a, b = 0, 1
    # After the loop, b holds fib(x): one step already done by (a, b) = (0, 1).
    for _ in range(x - 1):
        a, b = b, a + b
    return b
# Read n from stdin and print the n-th Fibonacci number.
n = int(input())
print(fibo(n))
# # for-loop version (alternative implementation, kept for reference)
# def fibo(x):
#     a, b = 0, 1
#     for _ in range(x):
#         a, b = b, a+b
#     return(a)
# n = int(input())
# print(fibo(n))
|
[
"[email protected]"
] | |
40aa8ad79278c7537cdc7550405b8ad12b72d6e7
|
d554b1aa8b70fddf81da8988b4aaa43788fede88
|
/5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4081/codes/1643_1055.py
|
5bb3ece083af6c50c3772003c55246d4aea20a12
|
[] |
no_license
|
JosephLevinthal/Research-projects
|
a3bc3ca3b09faad16f5cce5949a2279cf14742ba
|
60d5fd6eb864a5181f4321e7a992812f3c2139f9
|
refs/heads/master
| 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 384 |
py
|
# Test your code in small steps.
# Don't test everything only at the end, as it gets harder to pinpoint errors.
# Use the error messages to fix your code.
from math import*
# v: initial speed; a: launch angle (read in degrees, converted to radians);
# d: target distance. Prompts are in Portuguese (kept as-is: they are graded output).
v=float(input("velocidade inicial:"))
a=radians(float(input("angulos de tiro:")))
d=float(input("valor da distancia: "))
# Level-ground projectile range: R = v^2 * sin(2a) / g, with g = 9.8 m/s^2.
r=((v**2)*sin(2*a))/9.8
p=d-r
# Print "sim" (yes) if the shot lands within 0.1 of the target, else "nao" (no).
if(abs(p)<0.1):
	print("sim")
else:
	print("nao")
|
[
"[email protected]"
] | |
6c2b98a894099f068a128d68de56fc0ff0dcdde7
|
2b11e7aa28b84af2e2a7fd8719af89f5fffd8a5b
|
/tests/test_models/test_user.py
|
4b734786dc7c17a6ae7e51cd396963dfe334a4dd
|
[] |
no_license
|
nikolasribeiro/AirBnB_clone
|
6a3e3d65314a0131252461757943468628394ced
|
4529c56a706f0d956a238522d912cf6260f2fa28
|
refs/heads/main
| 2023-03-10T22:10:10.665939 | 2021-02-27T19:33:11 | 2021-02-27T19:33:11 | 338,063,410 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,112 |
py
|
#!/usr/bin/python3
""" Module tests/test_models/test_user"""
import models
from models.base_model import BaseModel
import os
import unittest
class TestBase_Model(unittest.TestCase):
    """Unit tests for BaseModel.

    NOTE(review): this class lives in tests/test_models/test_user.py but
    every test exercises BaseModel, not User -- confirm the intended target.
    """
    def test_docstring(self):
        """Module and class must both carry docstrings."""
        msj = "Módulo does not has docstring"
        self.assertIsNotNone(models.base_model.__doc__, msj)
        msj = "Clase does not has docstring"
        self.assertIsNotNone(BaseModel.__doc__, msj)
    def test_executable_file(self):
        """models/base_model.py must be readable, writable and executable."""
        is_read_true = os.access("models/base_model.py", os.R_OK)
        self.assertTrue(is_read_true)
        is_write_true = os.access("models/base_model.py", os.W_OK)
        self.assertTrue(is_write_true)
        is_exec_true = os.access("models/base_model.py", os.X_OK)
        self.assertTrue(is_exec_true)
    def test_is_an_instance(self):
        """BaseModel() must produce a BaseModel instance."""
        my_model = BaseModel()
        self.assertIsInstance(my_model, BaseModel)
    def test_id(self):
        """Two fresh instances must get distinct ids."""
        my_model = BaseModel()
        my_model1 = BaseModel()
        self.assertNotEqual(my_model.id, my_model1.id)
    def test_save(self):
        """save() must refresh updated_at."""
        my_model2 = BaseModel()
        first_updated = my_model2.updated_at
        my_model2.save()
        second_updated = my_model2.updated_at
        self.assertNotEqual(first_updated, second_updated)
    def test_to_dict(self):
        """to_dict() returns a dict with a __class__ marker and string timestamps."""
        my_model3 = BaseModel()
        my_dict_model3 = my_model3.to_dict()
        self.assertIsInstance(my_dict_model3, dict)
        # flag is reset and re-checked on every iteration; effectively this
        # asserts __class__ == "BaseModel" once per key in the dict.
        for key, value in my_dict_model3.items():
            flag = 0
            if my_dict_model3["__class__"] == "BaseModel":
                flag += 1
            self.assertTrue(flag == 1)
        for key, value in my_dict_model3.items():
            if key == "created_at":
                self.assertIsInstance(value, str)
            if key == "updated_at":
                self.assertIsInstance(value, str)
|
[
"[email protected]"
] | |
9315cc8bf5f6132cf366ce7e7d880acd7293cd3f
|
88eeba6df8382687f36a4765bb298f76465c8e81
|
/general/chainerrl/chainerrl/tests/links_tests/test_noisy_linear.py
|
49b094838cec68f4f40aa91df7f9371a2755ba50
|
[
"MIT"
] |
permissive
|
daniellawson9999/quick_start
|
db0b6e382efd640754ca1e7800753c94e668423a
|
947d61f118433dcd4cb845f27649ebfbc8062ecc
|
refs/heads/master
| 2022-02-23T21:54:16.273530 | 2019-09-27T01:46:41 | 2019-09-27T01:46:41 | 197,873,032 | 0 | 0 | null | 2019-07-20T03:12:34 | 2019-07-20T03:12:31 | null |
UTF-8
|
Python
| false | false | 2,798 |
py
|
import unittest
import chainer
from chainer import cuda
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
import numpy
from chainerrl.links import noisy_linear
@testing.parameterize(*testing.product({
    'size_args': [
        (5,),  # uninitialized from Chainer v2
        (None, 5),  # uninitialized
        (6, 5),  # initialized
    ],
    'nobias': [False, True],
}))
class TestFactorizedNoisyLinear(unittest.TestCase):
    """Tests for FactorizedNoisyLinear wrapping a chainer.links.Linear `mu`."""
    def setUp(self):
        mu = chainer.links.Linear(*self.size_args, nobias=self.nobias)
        self.linear = noisy_linear.FactorizedNoisyLinear(mu)
    def _test_calls(self, xp):
        # The layer must accept Variables, raw arrays, and inputs that need
        # flattening to the expected 2-D shape.
        x_data = xp.arange(12).astype(numpy.float32).reshape((2, 6))
        x = chainer.Variable(x_data)
        self.linear(x)
        self.linear(x_data + 1)
        self.linear(x_data.reshape((2, 3, 2)))
    def test_calls_cpu(self):
        self._test_calls(numpy)
    @attr.gpu
    def test_calls_gpu(self):
        self.linear.to_gpu(0)
        self._test_calls(cuda.cupy)
    @attr.gpu
    def test_calls_gpu_after_to_gpu(self):
        # Wrapping a mu link that is already on the GPU must also work.
        mu = self.linear.mu
        mu.to_gpu(0)
        self.linear = noisy_linear.FactorizedNoisyLinear(mu)
        self._test_calls(cuda.cupy)
    def _test_randomness(self, xp):
        # Two forward passes on the same input must differ (fresh noise),
        # with a mean squared difference near the analytic expectation.
        x = xp.random.standard_normal((10, 6)).astype(numpy.float32)
        y1 = self.linear(x).array
        y2 = self.linear(x).array
        d = float(xp.mean(xp.square(y1 - y2)))
        # The parameter name suggests that
        # xp.sqrt(d / 2) is approx to sigma_scale = 0.4
        # In fact, (for each element _[i, j],) it holds:
        # \E[(y2 - y1) ** 2] = 2 * \Var(y) = (4 / pi) * sigma_scale ** 2
        target = (0.4 ** 2) * 2
        if self.nobias:
            target *= 2 / numpy.pi
        else:
            target *= 2 / numpy.pi + numpy.sqrt(2 / numpy.pi) / y1.shape[1]
        # Accept anything within a 3x band around the expectation.
        self.assertGreater(d, target / 3.)
        self.assertLess(d, target * 3.)
    @condition.retry(3)
    def test_randomness_cpu(self):
        self._test_randomness(numpy)
    @attr.gpu
    @condition.retry(3)
    def test_randomness_gpu(self):
        self.linear.to_gpu(0)
        self._test_randomness(cuda.cupy)
    def _test_non_randomness(self, xp):
        # Noises should be the same in a batch
        x0 = xp.random.standard_normal((1, 6)).astype(numpy.float32)
        x = xp.broadcast_to(x0, (2, 6))
        y = self.linear(x).array
        xp.testing.assert_allclose(y[0], y[1], rtol=1e-4)
    def test_non_randomness_cpu(self):
        self._test_non_randomness(numpy)
    @attr.gpu
    def test_non_randomness_gpu(self):
        self.linear.to_gpu(0)
        self._test_non_randomness(cuda.cupy)
|
[
"[email protected]"
] | |
a7d4d5bf7c36dad18109efd3495f3312e958580c
|
931515a9fdd4404cb548fb6b80c91590f5d5e3c9
|
/presalytics/client/presalytics_ooxml_automation/models/chart_column_collections.py
|
91c556a70437a9633b3b183127aef59d065963d3
|
[
"MIT"
] |
permissive
|
presalytics/python-client
|
2e2fbd617b493ed8be90b844e23b736f294065e3
|
5d80b78562126feeeb49af4738e2c1aed12dce3a
|
refs/heads/master
| 2021-08-18T02:41:06.938468 | 2020-12-07T15:04:18 | 2020-12-07T15:04:18 | 203,414,411 | 4 | 1 |
MIT
| 2020-03-31T19:27:47 | 2019-08-20T16:31:57 |
Python
|
UTF-8
|
Python
| false | false | 3,705 |
py
|
# coding: utf-8
"""
OOXML Automation
This API helps users convert Excel and Powerpoint documents into rich, live dashboards and stories. # noqa: E501
The version of the OpenAPI document: 0.1.0-no-tags
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class ChartColumnCollections(object):
    """Model for a chart's column collection (OOXML Automation API).

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech). This copy deviates from the raw
    generator output (see to_dict) and will be lost on regeneration.

    Attributes:
        openapi_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the API definition.
    """

    openapi_types = {
        'chart_data_id': 'str',
        'id': 'str'
    }

    attribute_map = {
        'chart_data_id': 'chartDataId',
        'id': 'id'
    }

    def __init__(self, chart_data_id=None, id=None):  # noqa: E501
        """ChartColumnCollections - a model defined in OpenAPI"""  # noqa: E501
        self._chart_data_id = None
        self._id = None
        self.discriminator = None
        # chart_data_id is always assigned (possibly None); id only when given.
        self.chart_data_id = chart_data_id
        if id is not None:
            self.id = id

    @property
    def chart_data_id(self):
        """Gets the chart_data_id of this ChartColumnCollections.  # noqa: E501

        :return: The chart_data_id of this ChartColumnCollections.  # noqa: E501
        :rtype: str
        """
        return self._chart_data_id

    @chart_data_id.setter
    def chart_data_id(self, chart_data_id):
        """Sets the chart_data_id of this ChartColumnCollections.

        :param chart_data_id: The chart_data_id of this ChartColumnCollections.  # noqa: E501
        :type: str
        """
        self._chart_data_id = chart_data_id

    @property
    def id(self):
        """Gets the id of this ChartColumnCollections.  # noqa: E501

        :return: The id of this ChartColumnCollections.  # noqa: E501
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this ChartColumnCollections.

        :param id: The id of this ChartColumnCollections.  # noqa: E501
        :type: str
        """
        self._id = id

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # dict.items() iterates correctly on both Python 2 and 3; the
        # six.iteritems shim the generator emitted is unnecessary here.
        for attr, _ in self.openapi_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ChartColumnCollections):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"[email protected]"
] | |
5a0826ad9f7cbc75cb16320948b0a920328fccb2
|
a0d6cbae196c24254fb6f1411d756da0029e092a
|
/trunk/src/appserver/apps/user_srv_d/main.py
|
e626d55800d34accfb379fe41899e7ed973f72ca
|
[] |
no_license
|
newguangzhou/haha-cluster
|
8101ee1cb5b5ddbf916268029a33336c6fa0b06d
|
4cee4172f3bd7939e0369d46603a62087e206277
|
refs/heads/master
| 2021-05-16T10:21:38.245881 | 2017-09-25T03:36:07 | 2017-09-25T03:36:07 | 104,700,121 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,348 |
py
|
# -*- coding: utf-8 -*-
# user_srv_d entry point: Tornado web service exposing user/pet/device HTTP
# APIs backed by MongoDB DAOs and a service-discovery worker.
# NOTE(review): this file uses Python-2-only constructs (reload(sys) /
# setdefaultencoding here, `except Exception, e` and print statements
# further down) -- it will not run under Python 3.
import sys
sys.path.append("../../")
sys.path.append("../terminal_srv_d/")
# Python 2 hack: force UTF-8 as the process-wide default string encoding.
reload(sys)
sys.setdefaultencoding('utf-8')
#import setproctitle
from tornado import ioloop, gen
from tornado.web import Application, url
import tornado.options
from tornado.options import define, options
from lib.console import Console
from lib.pyloader import PyLoader
from lib.auth_dao import AuthDAO
from lib.user_dao import UserDAO
from lib.pet_dao import PetDAO
from lib.global_dao import GlobalDAO
#from lib.device_dao import DeivceDAO
from lib.sys_config import SysConfig
from lib.new_device_dao import NewDeviceDAO
from lib.gid_rpc import GIDRPC
from lib.msg_rpc import MsgRPC
from lib.boradcast_rpc import BroadcastRPC
from lib import sys_config, discover_config
from lib.service_discovery import server_discoverer_worker
from lib.mongo_dao_base import GetMongoClientAndAuth
from concurrent.futures import ThreadPoolExecutor
from lib.service_discovery import server_discoverer_worker
from lib import discover_config
import logging
logger = logging.getLogger(__name__)
# setproctitle is optional; fall back gracefully when it is not installed.
# NOTE(review): the flag name is misspelled ("setptitle") but used consistently.
support_setptitle = True
try:
    import setproctitle
except:
    support_setptitle = False
import handlers
# Command-line options (ports, bind address, debug mode).
define("debug_mode", 0, int,
       "Enable debug mode, 1 is local debug, 2 is test, 0 is disable")
define("port", 9100, int, "Listen port, default is 9100")
define("address", "0.0.0.0", str, "Bind address, default is 127.0.0.1")
define("console_port", 9110, int, "Console listen port, default is 9110")
# Parse commandline
tornado.options.parse_command_line()
# Size of the shared thread pool handed to every DAO.
max_thread_count = 30
# Init pyloader
pyloader = PyLoader("config")
conf = pyloader.ReloadInst("Config")
mongo_pyloader = PyLoader("configs.mongo_config")
mongo_conf = mongo_pyloader.ReloadInst("MongoConfig",
                                       debug_mode=options.debug_mode)
# Set process title
if support_setptitle:
    setproctitle.setproctitle(conf.proctitle)
#
worker = server_discoverer_worker.ServerDiscovererWorker()
msg_rpc = MsgRPC(worker.get_discover())
broadcast_rpc = BroadcastRPC(worker.get_discover())
#
thread_pool = ThreadPoolExecutor(max_thread_count)
mongo_client = GetMongoClientAndAuth(mongo_conf.default_meta)
# Init web application: URL routes plus shared settings (DAOs, RPC clients,
# config) reachable from handlers via self.settings.
webapp = Application(
    [
        (r"/user/get_verify_code", handlers.GetVerifyCode),
        (r"/user/push_message_cmd", handlers.PushMessageCmd),
        (r"/user/login", handlers.Login),
        (r"/user/register", handlers.Register),
        (r"/user/logout", handlers.Logout),
        (r"/user/regen_token", handlers.RegenToken),
        (r"/user/set_home_wifi", handlers.SetHomeWifi),
        (r"/user/set_home_location", handlers.SetHomeLocation),
        (r"/user/get_base_infomation", handlers.GetBaseInfo),
        (r"/user/suggest", handlers.Suggest),
        (r"/pet/location", handlers.PetLocation),
        (r"/pet/location_test", handlers.PetLocation2),
        (r"/pet/walk", handlers.PetWalk),
        (r"/pet/find", handlers.PetFind),
        (r"/pet/get_pet_type_info", handlers.PetTypeInfo),
        (r"/pet/get_pet_info", handlers.GetPetInfo),
        (r"/pet/get_pet_status", handlers.GetPetStatusInfo),
        (r"/pet/add_pet_info", handlers.AddPetInfo),
        (r"/pet/update_pet_info", handlers.UpdatePetInfo),
        (r"/pet/healthy/get_activity_info", handlers.GetActivityInfo),
        (r"/pet/healthy/get_sleep_info", handlers.GetSleepInfo),
        (r"/pet/healthy/summary", handlers.Summary),
        (r"/pet/healthy/set_sport_info", handlers.SetTargetStep),
        (r"/pet/activity", handlers.PetActivity),
        (r"/device/add_device_info", handlers.AddDeviceInfo),
        (r"/device/get_info", handlers.GetDeviceInfo),
        (r"/device/remove_device_info", handlers.RemoveDeviceInfo),
        (r"/device/set_sim_info", handlers.SetSimInfo),
        (r"/device/switch_light", handlers.SwitchLight),
        (r"/device/get_light_status", handlers.GetDeviceSwitchLightStatus),
        (r"/device/send_get_wifi_list_cmd", handlers.SendGetWifiListCmd),
        (r"/device/get_wifi_list", handlers.GetWifiList),
        (r"/device/reboot_device_cmd", handlers.RebootDeviceCmd),
        (r"/user/agree_policy", handlers.AgreePolicy),
        (r"/device/get_device_status", handlers.GetPetStatusInfo),
        (r"/app/get_config", handlers.AppConfig),
        (r"/user/set_outdoor_on_off", handlers.OutdoorOnOff),
        (r"/user/set_outdoor_wifi", handlers.SetOutdoorWifi),
    ],
    debug=True,
    autoreload=True,
    pyloader=pyloader,
    user_dao=UserDAO.new(mongo_client, thread_pool),
    global_dao=GlobalDAO.new(mongo_client, thread_pool),
    auth_dao=AuthDAO.new(mongo_client, thread_pool),
    pet_dao=PetDAO.new(mongo_client, thread_pool),
    device_dao=NewDeviceDAO.new(mongo_client, thread_pool),
    broadcast_rpc = broadcast_rpc,
    msg_rpc=msg_rpc,
    appconfig=conf, )
class _UserSrvConsole(Console):
    """Admin console (loopback TCP) for runtime maintenance commands."""
    def handle_cmd(self, stream, address, cmd):
        # cmd is a tokenised command line. Returns False to close the
        # session (on "quit"), True to keep it open.
        if len(cmd) == 1 and cmd[0] == "quit":
            # NOTE(review): "Byte!" looks like a typo for "Bye!" -- kept
            # as-is since it is runtime output.
            self.send_response(stream, "Byte!")
            return False
        elif len(cmd) == 0:
            # Empty line: ignore.
            pass
        elif len(cmd) == 1 and cmd[0] == "reload-config":
            # Hot-reload the app config and rebuild the GID RPC client.
            newconf = pyloader.ReloadInst("Config")
            webapp.settings["appconfig"] = newconf
            webapp.settings["gid_rpc"] = GIDRPC(newconf.gid_rpc_url)
            self.send_response(stream, "done")
        elif len(cmd) == 1 and cmd[0] == "reload-sysconfig":
            webapp.settings["sysconfig"].reload()
            self.send_response(stream, "done")
        else:
            self.send_response(stream, "Invalid command!")
        return True
# Init console
# Dev console bound to loopback only; see _UserSrvConsole.handle_cmd.
console = _UserSrvConsole()
console.bind(options.console_port, "127.0.0.1")
console.start()
# Init async
@gen.coroutine
def _async_init():
    # One-shot startup: load SysConfig, build the GID RPC client, then
    # register this service with the discovery worker.
    SysConfig.new(sys_config.DEFAULT_CATEGORY,mongo_client, thread_pool)
    yield SysConfig.current().open()
    webapp.settings["gid_rpc"] = GIDRPC(SysConfig.current().get(sys_config.SC_GID_RPC_URL))
    try:
        worker.register(discover_config.USER_SRV_D, options.port, 0, None)
        worker.work()
    # Python-2-only except syntax; a failed registration is fatal.
    except Exception, e:
        print "worker register error exception:", e
        logger.exception(e)
        exit(0)
ioloop.IOLoop.current().run_sync(_async_init)
# Run web app loop
webapp.listen(options.port, options.address, xheaders=True)
ioloop.IOLoop.current().start()
|
[
"[email protected]"
] | |
828855bc5a1f6617ef25c47b606649d873810864
|
1d49dcfe7a725ed9c21d5e614b7e61c81aae1c88
|
/modules/critics/CentralV.py
|
62f3043f15838fbfacbcde6b6d31b5066599a20e
|
[
"Apache-2.0"
] |
permissive
|
xiaojize/SMAC-1
|
c405aa22d30a7f176b4b2a29669ae82ea7f0b3c7
|
7aaf4673b0eecafc4ab25f381eea20fc762af56a
|
refs/heads/master
| 2023-06-30T14:37:44.870652 | 2021-07-23T15:15:49 | 2021-07-23T15:15:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 482 |
py
|
import torch.nn as nn
import torch.nn.functional as F
class CentralV_Critic(nn.Module):
    """Centralised state-value critic.

    A three-layer MLP (two 128-unit ReLU hidden layers) that maps the
    global state vector to a single scalar value V(s).
    """
    def __init__(self, input_shape, args):
        super(CentralV_Critic, self).__init__()
        self.args = args
        # Layer names (fc1/fc2/fc3) and creation order are part of the
        # checkpoint format -- do not rename or reorder.
        self.fc1 = nn.Linear(input_shape, 128)
        self.fc2 = nn.Linear(128, 128)
        self.fc3 = nn.Linear(128, 1)

    def forward(self, state):
        hidden = F.relu(self.fc1(state))
        hidden = F.relu(self.fc2(hidden))
        return self.fc3(hidden)
|
[
"[email protected]"
] | |
6cc03fb54250c0b2f6556012d2bf83b75474b3f2
|
9d278285f2bc899ac93ec887b1c31880ed39bf56
|
/ondoc/cart/migrations/0006_merge_20190326_1307.py
|
67a383ce2f25bfc403a88150e01f3f911f341528
|
[] |
no_license
|
ronit29/docprime
|
945c21f8787387b99e4916cb3ba1618bc2a85034
|
60d4caf6c52a8b70174a1f654bc792d825ba1054
|
refs/heads/master
| 2023-04-01T14:54:10.811765 | 2020-04-07T18:57:34 | 2020-04-07T18:57:34 | 353,953,576 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 267 |
py
|
# Generated by Django 2.0.5 on 2019-03-26 07:37
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated merge migration: reconciles the divergent 0004/0005
    # branches of the `cart` app's migration graph. It intentionally carries
    # no operations; applied migrations must not be hand-edited.
    dependencies = [
        ('cart', '0005_auto_20190315_1612'),
        ('cart', '0004_auto_20190318_1424'),
    ]
    operations = [
    ]
|
[
"[email protected]"
] | |
bbc9346e361617f40137e996c9caee2f66e94355
|
032a0c939d96d0e5307dbce86e11faf7060f4ed9
|
/lte/gateway/python/magma/pipelined/tests/test_ipv6_prefix_mapper.py
|
d33410b7b423133760753874c76ffd7d50ae75a6
|
[
"BSD-3-Clause"
] |
permissive
|
radha0018/magma
|
cac9ff3491dd2661e5dc0aa1f9a304a5428e2d2a
|
8436966a4bb3cf7fdc3f567704062b6f9568db25
|
refs/heads/master
| 2023-05-05T08:26:07.132969 | 2021-05-27T18:44:44 | 2021-05-27T18:44:44 | 371,097,174 | 0 | 2 |
NOASSERTION
| 2021-05-26T16:26:21 | 2021-05-26T16:15:53 |
Go
|
UTF-8
|
Python
| false | false | 2,280 |
py
|
"""
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from magma.pipelined.ipv6_prefix_store import (
InterfaceIDToPrefixMapper,
get_ipv6_interface_id,
get_ipv6_prefix,
)
class InterfaceMappersTest(unittest.TestCase):
    """Tests for InterfaceIDToPrefixMapper: save/get of IPv6 prefixes
    keyed by interface id."""
    def setUp(self):
        self._interface_to_prefix_mapper = InterfaceIDToPrefixMapper()
        # Reset the internal map so each test starts from an empty state.
        self._interface_to_prefix_mapper._prefix_by_interface = {}
    def test_prefix_mapper_test(self):
        ipv6_addrs = ['ba10:5:6c:9:9d21:4407:d337:1928',
                      '321b:534:6c:9:999:0:d337:1928',
                      '222b:5334:111c:111::d337:1928']
        prefixes = [get_ipv6_prefix(ipv6_addrs[0]),
                    get_ipv6_prefix(ipv6_addrs[1])]
        interfaces = [get_ipv6_interface_id(ipv6_addrs[0]),
                      get_ipv6_interface_id(ipv6_addrs[1]),
                      get_ipv6_interface_id(ipv6_addrs[2])]
        # Save then read back the first mapping.
        self._interface_to_prefix_mapper.save_prefix(
            interfaces[0], prefixes[0])
        self.assertEqual(
            self._interface_to_prefix_mapper.get_prefix(
                interfaces[0]),
            'ba10:5:6c:9::')
        self._interface_to_prefix_mapper.save_prefix(
            interfaces[1], prefixes[1])
        self.assertEqual(interfaces[1], '::999:0:d337:1928')
        self.assertEqual(
            self._interface_to_prefix_mapper.get_prefix(
                interfaces[1]),
            prefixes[1])
        # Re-saving under an existing interface id overwrites the old prefix.
        self._interface_to_prefix_mapper.save_prefix(
            interfaces[0], prefixes[1])
        self.assertEqual(
            self._interface_to_prefix_mapper.get_prefix(
                interfaces[0]),
            '321b:534:6c:9::')
        # Unknown interface ids resolve to None.
        self.assertEqual(
            self._interface_to_prefix_mapper.get_prefix(
                interfaces[2]),
            None)
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
d581e305ac079f2af1725f50e4bd33b9987b30cf
|
79b1d3d8ffbda5297fff6fefe2528e303bf2110a
|
/RSGGenFragment/RSToQQ/RSGravitonToQuarkQuark_W-0p1_M_3250_TuneCUETP8M1_13TeV_pythia8_cfi.py
|
4743fb46d70ff29d63a01653fe65bceda8571ccf
|
[] |
no_license
|
yguler/MCFragments-1
|
25745a043653d02be3a4c242c1a85af221fc34b3
|
7c4d10ee59e00f997221109bf006819fd645b92f
|
refs/heads/master
| 2021-01-13T14:09:12.811554 | 2016-12-11T15:57:37 | 2016-12-11T15:57:37 | 76,184,433 | 0 | 0 | null | 2016-12-11T15:59:22 | 2016-12-11T15:59:22 | null |
UTF-8
|
Python
| false | false | 1,323 |
py
|
import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(0.00000782),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'ExtraDimensionsG*:ffbar2G* = on',
'ExtraDimensionsG*:kappaMG = 1.439532822',
'5100039:m0 = 3250',
'5100039:onMode = off',
'5100039:onIfAny = 1 2 3 4 5'
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters',
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
|
[
"[email protected]"
] | |
0997db820df5512beb330aedeb592bcd7ec5f840
|
cb7c3673ad937c282a39be74d0aee8628e75928d
|
/tests/test_utils/output/uriandcurie.py
|
2c0bb6edc92ee3846661835fdd4a574c30b2da97
|
[
"CC0-1.0"
] |
permissive
|
bpow/linkml
|
649d6d48f39a8c51efa92fba7eb25c1d8854b472
|
ab83c0caee9c02457ea5a748e284dee6b547fcd6
|
refs/heads/main
| 2023-05-05T18:46:04.501897 | 2021-05-13T21:17:03 | 2021-05-13T21:17:03 | 371,163,928 | 0 | 0 |
CC0-1.0
| 2021-05-26T20:42:13 | 2021-05-26T20:42:12 | null |
UTF-8
|
Python
| false | false | 4,918 |
py
|
# Auto generated from uriandcurie.yaml by pythongen.py version: 0.9.0
# Generation date: 2021-03-26 14:22
# Schema: uriandcurie
#
# id: http://example.org/test/uriandcurie
# description:
# license:
import dataclasses
import sys
import re
from typing import Optional, List, Union, Dict, ClassVar, Any
from dataclasses import dataclass
from linkml_model.meta import EnumDefinition, PermissibleValue, PvFormulaOptions
from linkml.utils.slot import Slot
from linkml.utils.metamodelcore import empty_list, empty_dict, bnode
from linkml.utils.yamlutils import YAMLRoot, extended_str, extended_float, extended_int
from linkml.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs
from linkml.utils.formatutils import camelcase, underscore, sfx
from linkml.utils.enumerations import EnumDefinitionImpl
from rdflib import Namespace, URIRef
from linkml.utils.curienamespace import CurieNamespace
from linkml.utils.metamodelcore import Curie, ElementIdentifier, NCName, NodeIdentifier, URI, URIorCURIE
metamodel_version = "1.7.0"
# Overwrite dataclasses _init_fn to add **kwargs in __init__
dataclasses._init_fn = dataclasses_init_fn_with_kwargs
# Namespaces
M = CurieNamespace('m', 'http://example.org/test/uriandcurie')
SHEX = CurieNamespace('shex', 'http://www.w3.org/ns/shex#')
XSD = CurieNamespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
DEFAULT_ = M
# Types
class String(str):
""" A character string """
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "string"
type_model_uri = M.String
class Uriorcurie(URIorCURIE):
""" a URI or a CURIE """
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "uriorcurie"
type_model_uri = M.Uriorcurie
class Uri(URI):
""" a complete URI """
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "uri"
type_model_uri = M.Uri
class Curie(Curie):
""" a CURIE """
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "curie"
type_model_uri = M.Curie
class Ncname(NCName):
""" Prefix part of CURIE """
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "ncname"
type_model_uri = M.Ncname
class Objectidentifier(ElementIdentifier):
""" A URI or CURIE that represents an object in the model. """
type_class_uri = SHEX.iri
type_class_curie = "shex:iri"
type_name = "objectidentifier"
type_model_uri = M.Objectidentifier
class Nodeidentifier(NodeIdentifier):
""" A URI, CURIE or BNODE that represents a node in a model. """
type_class_uri = SHEX.nonliteral
type_class_curie = "shex:nonliteral"
type_name = "nodeidentifier"
type_model_uri = M.Nodeidentifier
# Class references
class C1Id(ElementIdentifier):
pass
@dataclass
class C1(YAMLRoot):
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = M.C1
class_class_curie: ClassVar[str] = "m:C1"
class_name: ClassVar[str] = "c1"
class_model_uri: ClassVar[URIRef] = M.C1
id: Union[str, C1Id] = None
hasCurie: Optional[Union[str, Curie]] = None
hasURI: Optional[Union[str, URI]] = None
hasNcName: Optional[Union[str, NCName]] = None
id2: Optional[Union[str, NodeIdentifier]] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self.id is None:
raise ValueError("id must be supplied")
if not isinstance(self.id, C1Id):
self.id = C1Id(self.id)
if self.hasCurie is not None and not isinstance(self.hasCurie, Curie):
self.hasCurie = Curie(self.hasCurie)
if self.hasURI is not None and not isinstance(self.hasURI, URI):
self.hasURI = URI(self.hasURI)
if self.hasNcName is not None and not isinstance(self.hasNcName, NCName):
self.hasNcName = NCName(self.hasNcName)
if self.id2 is not None and not isinstance(self.id2, NodeIdentifier):
self.id2 = NodeIdentifier(self.id2)
super().__post_init__(**kwargs)
# Enumerations
# Slots
class slots:
pass
slots.id = Slot(uri=M.id, name="id", curie=M.curie('id'),
model_uri=M.id, domain=None, range=URIRef)
slots.hasCurie = Slot(uri=M.hasCurie, name="hasCurie", curie=M.curie('hasCurie'),
model_uri=M.hasCurie, domain=None, range=Optional[Union[str, Curie]])
slots.hasURI = Slot(uri=M.hasURI, name="hasURI", curie=M.curie('hasURI'),
model_uri=M.hasURI, domain=None, range=Optional[Union[str, URI]])
slots.hasNcName = Slot(uri=M.hasNcName, name="hasNcName", curie=M.curie('hasNcName'),
model_uri=M.hasNcName, domain=None, range=Optional[Union[str, NCName]])
slots.id2 = Slot(uri=M.id2, name="id2", curie=M.curie('id2'),
model_uri=M.id2, domain=None, range=Optional[Union[str, NodeIdentifier]])
|
[
"[email protected]"
] | |
b6e2ce22fb67076c267ba2e1fd71f0b24c1d2878
|
20dba145fd988d5901cfd335efe238c0dce8ac5b
|
/analytics/decorators/cache_dec.py
|
df12f6fbc3b61d39bd1710094aebd7c6bc2533c3
|
[
"BSD-3-Clause"
] |
permissive
|
ModelDBRepository/228604
|
10be01bf0eeea3ea07ef4c38ebb3b4c771000923
|
8f641f73bcac2700b476663fe656fcad7d63470d
|
refs/heads/master
| 2020-05-29T18:25:57.095212 | 2019-05-31T03:47:54 | 2019-05-31T03:47:54 | 189,299,677 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,489 |
py
|
""" a simple caching of function return values
using the decorator "cached", e.g.
@cached
def foo(a, b, c):
return a*b-c
will cache the result of the calculation foo does, which of course better not be this trivial.
works also for numpy arrays in the parameters.
should of course only be used on functions that do not depend on global parameters (as their state would not be cashed)
"""
import hashlib
import numpy as np
from functools import wraps
cache = {}
hits = 0
misses = 0
no_caching = False
def cached(func):
global cache
def hashit(a):
# builtin hash does weird things with complex number with integer real (or imag?) part : hash(1.5j-1) == hash(1.5j-2)
return (a.__hash__() if not isinstance(a,np.ndarray) else hashlib.sha1(a).hexdigest())
@wraps(func)
def wrapper(*args, **kwargs): # kwargs does not work yet!
global misses, hits
key = tuple([func.__name__]) + tuple(("",hashit(a)) for a in args) + tuple((k,hashit(v)) for k, v in sorted(kwargs.items()))
if no_caching:
return func(*args, **kwargs)
elif not cache.has_key(key):
#print func.__name__ + " missed " + str(key)
cache[key] = func(*args, **kwargs)
misses += 1
else:
hits += 1
#print func.__name__ + " hit"
return cache[key]
return wrapper
def clear_cache():
global cache, misses, hits
cache = {}
hits = 0
misses = 0
|
[
"[email protected]"
] | |
a108d8f0631873f4b65550ed4b7d482f12e3e8a6
|
02422812b5e93225f6c842ec57aae601cb939a8d
|
/tests/client/internal_messaging/test_producer.py
|
fc80a258c21a90b32dbe40386e709df21e14b6aa
|
[
"Apache-2.0"
] |
permissive
|
gcollard/lightbus
|
1af20564bb05df76ed7302f6eb93487c5b17592d
|
d04deeda8ccef5a582b79255725ca2025a085c02
|
refs/heads/master
| 2022-12-27T01:02:45.505846 | 2020-10-02T02:18:05 | 2020-10-02T02:18:05 | 300,042,306 | 0 | 0 |
Apache-2.0
| 2020-10-02T02:18:06 | 2020-09-30T19:44:52 |
Python
|
UTF-8
|
Python
| false | false | 2,372 |
py
|
import asyncio
import logging
import pytest
from _pytest.logging import LogCaptureFixture
from lightbus.client.internal_messaging.producer import InternalProducer
pytestmark = pytest.mark.unit
@pytest.mark.asyncio
async def test_queue_monitor(producer: InternalProducer, caplog: LogCaptureFixture, fake_coroutine):
"""Ensure the queue monitor logs as we expect
Note that something we implicitly test for here is that the monitor
does not log lots of duplicate lines. Rather it only logs when
something changes.
"""
producer.size_warning = 3
producer.monitor_interval = 0.01
caplog.set_level(logging.WARNING)
# Start the producer running
producer.start()
# No logging yet
assert not caplog.records
# Add a couple of items to the queue (still under size_warning)
producer.queue.put_nowait(None)
producer.queue.put_nowait(None)
await asyncio.sleep(0.05)
# Still no logging yet
assert not caplog.records
# One more gets us up to the warning level
producer.queue.put_nowait(None)
await asyncio.sleep(0.05)
# Now we have logging
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == "Queue in InternalProducer now has 3 commands."
caplog.clear() # Clear the log messages
# Let's check we get another messages when the queue gets bigger again
producer.queue.put_nowait(None)
await asyncio.sleep(0.05)
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == "Queue in InternalProducer now has 4 commands."
caplog.clear() # Clear the log messages
# Now check we get logging when the queue shrinks, but is still above the warning level
producer.queue.get_nowait()
await asyncio.sleep(0.05)
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == (
"Queue in InternalProducer has shrunk back down to 3 commands."
)
caplog.clear() # Clear the log messages
# Now check we get logging when the queue shrinks to BELOW the warning level
producer.queue.get_nowait()
await asyncio.sleep(0.05)
assert len(caplog.records) == 1
assert caplog.records[0].getMessage() == (
"Queue in InternalProducer has shrunk back down to 2 commands. "
"Queue is now at an OK size again."
)
caplog.clear() # Clear the log messages
|
[
"[email protected]"
] | |
cb2c66246218d18c73711d4760222ad0c1230cb8
|
571a89f94f3ebd9ec8e6b618cddb7d05811e0d62
|
/chokudai_S001/h/main.py
|
dee5983b58febe63c07ef1f8bf5b7db686e13a53
|
[] |
no_license
|
ryu19-1/atcoder_python
|
57de9e1db8ff13a107b5861f8f6a231e40366313
|
cc24b3c2895aad71d40cefbb8e2893dc397b8f4f
|
refs/heads/master
| 2023-05-10T05:32:16.507207 | 2021-05-19T17:48:10 | 2021-05-19T17:48:10 | 368,954,430 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 511 |
py
|
#!/usr/bin/env python3
import sys
from collections import deque, Counter
from heapq import heappop, heappush
from bisect import bisect_left
from itertools import accumulate
sys.setrecursionlimit(10**6)
INF = 10**12
m = 10**9 + 7
def main():
N = int(input())
a = list(map(int, input().split()))
dp = [INF] * N
for i in range(N):
d = bisect_left(dp, a[i])
dp[d] = a[i]
# print(i, dp)
ans = bisect_left(dp, INF)
print(ans)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
eac93448f682961cac9392c005e6e93abf7cac29
|
e5664b40c9d0a828c009b30ed8fe62666d04bf62
|
/falcon_marshmallow/_version.py
|
ceaa700e54e94982b6e19e2fb7dede45e5f07725
|
[
"MIT"
] |
permissive
|
evilr00t/falcon-marshmallow
|
9eb348fd68e1b0c85927e77f62bc02fc093ad28e
|
97f169c78f11a638b1f21b3a977bb5df8d071be5
|
refs/heads/master
| 2022-02-23T05:04:37.315682 | 2019-10-12T19:37:11 | 2019-10-12T19:37:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 499 |
py
|
# -*- coding: utf-8 -*-
"""
version.py module
The version set here will be automatically incorporated into setup.py
and also set as the __version__ attribute for the package.
"dev", "rc", and other verison tags should be added using the
``setup.py egg_info`` command when creating distributions.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
__version_info__ = (0, 4, 0)
__version__ = ".".join([str(ver) for ver in __version_info__])
|
[
"[email protected]"
] | |
93c094ec3ff67c2547a4273d6b6d7dd5b2d36e17
|
528c811306faa4a34bf51fca7955b7a24ac2e30c
|
/Python/Number of Islands II.py
|
ea9b85418e2cf1f4baca66002f08cbad1d4cd15e
|
[] |
no_license
|
ganjingcatherine/LeetCode-1
|
1addbd7e4d9254a146601f9d5e28b8becb8235a6
|
488782d3f1e759da2d32b4e82dbf55b96c431244
|
refs/heads/master
| 2021-05-11T03:15:16.810035 | 2016-02-06T06:19:18 | 2016-02-06T06:19:18 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,429 |
py
|
"""
A 2d grid map of m rows and n columns is initially filled with water. We may perform an addLand operation which turns the water at position (row, col) into a land. Given a list of positions to operate, count the number of islands after each addLand operation. An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically. You may assume all four edges of the grid are all surrounded by water.
Example:
Given m = 3, n = 3, positions = [[0,0], [0,1], [1,2], [2,1]].
Initially, the 2d grid grid is filled with water. (Assume 0 represents water and 1 represents land).
0 0 0
0 0 0
0 0 0
Operation #1: addLand(0, 0) turns the water at grid[0][0] into a land.
1 0 0
0 0 0 Number of islands = 1
0 0 0
Operation #2: addLand(0, 1) turns the water at grid[0][1] into a land.
1 1 0
0 0 0 Number of islands = 1
0 0 0
Operation #3: addLand(1, 2) turns the water at grid[1][2] into a land.
1 1 0
0 0 1 Number of islands = 2
0 0 0
Operation #4: addLand(2, 1) turns the water at grid[2][1] into a land.
1 1 0
0 0 1 Number of islands = 3
0 1 0
We return the result as an array: [1, 1, 2, 3]
"""
class union_find:
def __init__(self, m, n):
self.father = {}
self.m = m
self.n = n
for i in range(m):
for j in range(n):
id = self.convert_to_id(i, j)
self.father[id] = id
def find(self, x, y):
parent = self.father[self.convert_to_id(x, y)]
while parent != self.father[parent]:
parent = self.father[parent]
return parent
def compressed_find(self, x, y):
parent = self.father[self.convert_to_id(x, y)]
while parent != self.father[parent]:
parent = self.father[parent]
# set all father to be parent we just get
prev_father = self.father[self.convert_to_id(x, y)]
while prev_father != self.father[prev_father]:
prev_father, self.father[prev_father] = self.father[prev_father], parent
return parent
def union(self, x1, y1, x2, y2):
f1 = self.find(x1, y1)
f2 = self.find(x2, y2)
if f1 != f2:
self.father[f1] = f2
def convert_to_id(self, x, y):
return x * self.n + y
class Solution(object):
def numIslands2(self, m, n, positions):
"""
:type m: int
:type n: int
:type positions: List[List[int]]
:rtype: List[int]
"""
if m == 0 or n == 0:
return []
if not positions or len(positions) == 0:
return []
island = [[False for _ in range(n)] for _ in range(m)]
directions = [[0, -1], [0, 1], [1, 0], [-1, 0]]
count, uf, result = 0, union_find(m, n), []
for position in positions:
x, y = position[0], position[1]
if not island[x][y]:
count += 1
island[x][y] = True
for i in range(4):
nx, ny = x + directions[i][0], y + directions[i][1]
if 0 <= nx < m and 0 <= ny < n and island[nx][ny]:
position_father = uf.find(x, y)
now_father = uf.find(nx, ny)
if position_father != now_father:
count -= 1
uf.union(x, y, nx, ny)
result.append(count)
return result
|
[
"[email protected]"
] | |
8b5353bb413efa3cbabe1730e3767936265568a8
|
0d0efed91a1e320509a7625bd72ebea1b64fc95b
|
/numpy_learn/5_numpy_function.py
|
0fc81c094aa3704f8098fde5e9b67f07f7783576
|
[] |
no_license
|
starryrbs/python_ai
|
ed74a3c2d53378b47b2be910d97255f2706fd25e
|
80f8fd361d7b366ba0607417f0272bbaa3672e51
|
refs/heads/master
| 2020-04-24T03:48:52.260392 | 2019-02-20T13:56:42 | 2019-02-20T13:56:42 | 171,681,894 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,783 |
py
|
import numpy as np
# numpy的随机数函数
# numpy的random子库:np.random.*,主要有np.random.rand() np.random.randn() np.random.randint()
# rand(d0,d1,d2,……,dn) : 根据d0-dn创建随机数组,浮点数,范围是[0,1),均匀分布
a = np.random.rand(2, 3, 4)
print(a)
"""
[[[0.4506612 0.5296636 0.9747625 0.90105177]
[0.25850117 0.90704491 0.87144252 0.00418912]
[0.69423447 0.690204 0.4432447 0.37734196]]
[[0.41056822 0.4220897 0.80819521 0.99022746]
[0.61803924 0.93554027 0.3742707 0.94081985]
[0.15283965 0.09844152 0.25726209 0.24488101]]]
"""
# randn(d0,d1,d2,……,dn) : 根据d0-dn创建随机数组,标准正态分布
a = np.random.randn(2, 3, 4)
print(a)
# randint(low, high, shape) : 根据shape创建随机数数组,范围是[low, high)
a = np.random.randint(5, 10, (2, 3, 4))
print(a)
"""
[[[5 6 5 7]
[7 5 9 5]
[6 7 6 5]]
[[8 6 7 5]
[6 8 5 6]
[8 8 7 9]]]
"""
# seed(s) : 随机数种子,s是给定的种子值
# np.random.seed(5)
# a = np.random.randint(5, 10, (2, 3, 4))
# print(a)
"""
[[[8 5 6 5]
[9 8 5 5]
[9 6 5 8]]
[[9 8 6 9]
[7 6 6 7]
[6 6 6 7]]]
"""
# 如上图:当给定的种子值为4时,数组的值并不会改变
# shuffle(a): 根据数组a的每一纵列进行随机排列,数组a发生改变
a = np.random.randint(5, 10, (3, 4))
print(a)
"""
[[[6 8 7 8]
[9 7 7 9]
[5 6 6 8]]
[[6 6 5 6]
[5 7 5 5]
[6 8 5 9]]]
"""
np.random.shuffle(a)
print(a)
"""
[[8 7 8 7]
[5 6 5 8]
[7 9 5 5]]
[[5 6 5 8]
[8 7 8 7]
[7 9 5 5]]
"""
# permutation(a) :根据数组a的每一纵列进行随机排列,数组a不改变
a = np.random.randint(5, 10, (3, 4))
print(a)
"""
[[8 7 5 9]
[5 9 8 6]
[6 6 5 5]]
"""
b = np.random.permutation(a)
print(a)
"""
[[9 5 7 9]
[5 9 5 7]
[6 8 6 7]]
"""
print(b)
"""
[[5 9 5 7]
[6 8 6 7]
[9 5 7 9]]
"""
# choice(a, size, replace, p):从一维数组a中以概率p抽取元素,形成size形状的新数组,replace表示是否可以重用元素,默认为True
a = np.arange(6)
print(np.random.choice(a, 2, replace=False, p=a / np.sum(a)))
# replace在一维数组中有效
"""
uniform(low, high, size) : 产生具有均匀分布的数组,low起始值,high结束值,size形状
normal(loc,scale,size) : 产生具有正态分布的数组,loc均值,scale标准差,size形状
poisson(lam,size) : 产生具有泊松分布的数组,lam随机事件发生率,size形状
"""
# numpy的统计函数:
# np.sum(a, axis=None) : 根据给定轴axis计算数组a相关元素之和,axis整数或元组。
a = np.arange(15).reshape((3, 5))
print(a)
"""
[[ 0 1 2 3 4]
[ 5 6 7 8 9]
[10 11 12 13 14]]
"""
print(np.sum(a, axis=0))
# [15 18 21 24 27]
print(np.sum(a, axis=1))
# [10 35 60]
"""
当axis=None时,np.sum(a)表示数组a的所有元素总和
当axis=0时,表示的是数组a各纵列元素之和
当axis=1时,表示的是数组a各横列元素之和
mean(a, axis=None) :根据给定轴axis计算数组a相关元素的期望,axis整数或元组
"""
# mean 求取均值
print(1, np.mean(a))
print(np.mean(a, axis=0))
# average(a,axis=None,weights=None):根据给定轴axis计算数组a相关元素的加权平均值
print(np.average(a, axis=0, weights=[2, 3, 4]))
# [ 6.11111111 7.11111111 8.11111111 9.11111111 10.11111111]
# 6.111111111111111是这样计算出来的: (0 * 2 + 5 * 3 + 4 * 10) / (2 + 3 + 4)
"""
std(a, axis=None) : 根据给定轴axis计算数组a相关元素的标准差
var(a, axis=None) : 根据给定轴axis计算数组a相关元素的方差
min(a) max(a) : 计算数组a中元素的最小值、最大值
argmin(a) argmax(a) : 计算数组a中元素最小值、最大值的降一维后下标
unravel_index(index, shape) : 根据shape将一维下标index转换成多维下标
ptp(a) : 计算数组a中元素最大值与最小值的差
median(a) : 计算数组a中元素的中位数(中值)
"""
print("----------梯度函数------------")
"""
np.gradient(a) :计算数组a中元素的梯度,当a为多维时,返回每个维度梯度
梯度:连续值之间的变化率,即斜率
XY坐标轴连续三个X坐标对应的Y轴值:a, b, c,其中,b的梯度是: (c‐a)/2
"""
a = np.random.randint(0, 20, (5))
print(a)
# [ 5 5 13 6 10]
print(np.gradient(a))
# [ 0. 4. 0.5 -1.5 4. ]
# 0 : (5-5)/1
# 4. : (10-6)/1
# 0.5: (6-5)/2
# 4. : (13-5)/2
# 当a为多维数组时
a = np.arange(12).reshape(2,6)
print(a)
"""
[[ 0 1 2 3 4 5]
[ 6 7 8 9 10 11]]
"""
print(np.gradient(a))
"""
[array([[6., 6., 6., 6., 6., 6.],
[6., 6., 6., 6., 6., 6.]]), array([[1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1.]])]
"""
# 上侧表示最外层维度(axis=0)的梯度,下侧表示第二层维度(axis=1)的梯度。
|
[
"[email protected]"
] | |
6aec87a1fbe7be776d760cf637c53614801b725b
|
35286efd76814a1f3bc05da07f2968d05737c238
|
/esim/test.py
|
b013aa69540a306acd2cfacf63915c5ba49b3226
|
[
"Apache-2.0"
] |
permissive
|
jiniaoxu/text_matching
|
ac41c7de8f66f61a6958a35dfd4584539cd97c51
|
154de91000e8677703192cf5eae49fc6c3c09eea
|
refs/heads/master
| 2020-06-04T05:45:09.320991 | 2019-06-13T02:50:54 | 2019-06-13T02:50:54 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 782 |
py
|
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
from esim.graph import Graph
import tensorflow as tf
from utils.load_data import load_data
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
os.environ['CUDA_VISIBLE_DEVICES'] = '2'
p, h, y = load_data('ccb/test.csv', data_size=1000)
model = Graph()
saver = tf.train.Saver()
with tf.Session()as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess, '../output/esim/esim_12.ckpt')
loss, acc = sess.run([model.loss, model.acc],
feed_dict={model.p: p,
model.h: h,
model.y: y,
model.keep_prob: 1})
print('loss: ', loss, ' acc:', acc)
|
[
"[email protected]"
] | |
561107764d55ee75983f3adc71f5cf85b27d5ea0
|
5a45981c89d0d9c0f2e9453abdefc333deb53e80
|
/nanodet/model/fpn/fpn.py
|
b031c6c81b0d7eacf7b045c53975dc5b07aa5c94
|
[
"Apache-2.0"
] |
permissive
|
zhiqwang/nanodet
|
fd0b2e9c4badf492649aef7c3b397394c3110d1d
|
dd94177c0cb411ee21f4fc4ebc2ef01647e64823
|
refs/heads/main
| 2023-03-17T12:23:12.788037 | 2021-03-15T12:00:19 | 2021-03-15T12:00:19 | 348,642,567 | 2 | 0 |
Apache-2.0
| 2021-03-17T09:01:43 | 2021-03-17T09:01:43 | null |
UTF-8
|
Python
| false | false | 3,241 |
py
|
# Modification 2020 RangiLyu
# Copyright 2018-2019 Open-MMLab.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch.nn as nn
import torch.nn.functional as F
from ..module.conv import ConvModule
from ..module.init_weights import xavier_init
class FPN(nn.Module):
def __init__(self,
in_channels,
out_channels,
num_outs,
start_level=0,
end_level=-1,
conv_cfg=None,
norm_cfg=None,
activation=None
):
super(FPN, self).__init__()
assert isinstance(in_channels, list)
self.in_channels = in_channels
self.out_channels = out_channels
self.num_ins = len(in_channels)
self.num_outs = num_outs
self.fp16_enabled = False
if end_level == -1:
self.backbone_end_level = self.num_ins
assert num_outs >= self.num_ins - start_level
else:
# if end_level < inputs, no extra level is allowed
self.backbone_end_level = end_level
assert end_level <= len(in_channels)
assert num_outs == end_level - start_level
self.start_level = start_level
self.end_level = end_level
self.lateral_convs = nn.ModuleList()
for i in range(self.start_level, self.backbone_end_level):
l_conv = ConvModule(
in_channels[i],
out_channels,
1,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg,
activation=activation,
inplace=False)
self.lateral_convs.append(l_conv)
self.init_weights()
# default init_weights for conv(msra) and norm in ConvModule
def init_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
xavier_init(m, distribution='uniform')
def forward(self, inputs):
assert len(inputs) == len(self.in_channels)
# build laterals
laterals = [
lateral_conv(inputs[i + self.start_level])
for i, lateral_conv in enumerate(self.lateral_convs)
]
# build top-down path
used_backbone_levels = len(laterals)
for i in range(used_backbone_levels - 1, 0, -1):
prev_shape = laterals[i - 1].shape[2:]
laterals[i - 1] += F.interpolate(
laterals[i], size=prev_shape, mode='bilinear')
# build outputs
outs = [
# self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels)
laterals[i] for i in range(used_backbone_levels)
]
return tuple(outs)
# if __name__ == '__main__':
|
[
"[email protected]"
] | |
f1b6f23525382617a5501166f87ecca57e0d62c3
|
938a496fe78d5538af94017c78a11615a8498682
|
/algorithms/401-500/442.find-all-duplicates-in-an-array.py
|
6a7dfa1426ec528b0bb7cf1b4da44bb4ceb85ca5
|
[] |
no_license
|
huilizhou/Leetcode-pyhton
|
261280044d15d0baeb227248ade675177efdb297
|
6ae85bf79c5a21735e3c245c0c256f29c1c60926
|
refs/heads/master
| 2020-03-28T15:57:52.762162 | 2019-11-26T06:14:13 | 2019-11-26T06:14:13 | 148,644,059 | 8 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 752 |
py
|
# 数组中重复的数据
class Solution(object):
def findDuplicates(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
# 我的写法,不合题意。时间复杂度上
# dic = {}
# res = []
# for i in nums:
# dic[i] = dic.get(i, 0) + 1
# if dic[i] > 1:
# res.append(i)
# return res
# 人家的写法,将元素变成索引,因为题目中的范围是1<=a[i]<=n
res = []
for n in nums:
if nums[abs(n) - 1] > 0:
nums[abs(n) - 1] *= -1
else:
res.append(abs(n))
return res
print(Solution().findDuplicates([4, 3, 2, 7, 8, 2, 3, 1]))
|
[
"[email protected]"
] | |
f2b3256c22467e1b32dda229247fffda1cde9b95
|
e3bb63f93e36aab4a78356ba9d0e82f935325906
|
/bitmovin/resources/models/manifests/hls/vtt_media.py
|
78827f4ae4353e744ea3c2459772045c4d003fa8
|
[
"Unlicense"
] |
permissive
|
camberbridge/bitmovin-python
|
1668367980df49f9088b93e4b6764563cbdb8bcf
|
3af4c6e79b0291fda05fd1ceeb5bed1bba9f3c95
|
refs/heads/master
| 2020-04-09T17:51:46.786389 | 2018-11-30T14:46:34 | 2018-11-30T14:46:34 | 160,493,890 | 0 | 0 |
Unlicense
| 2018-12-05T09:31:18 | 2018-12-05T09:31:17 | null |
UTF-8
|
Python
| false | false | 1,259 |
py
|
from .abstract_media import AbstractMedia
class VttMedia(AbstractMedia):
def __init__(self, name, group_id, vtt_url, language=None, assoc_language=None, is_default=None, autoselect=None,
characteristics=None, id_=None):
super().__init__(id_=id_, name=name, group_id=group_id, language=language, assoc_language=assoc_language,
is_default=is_default, autoselect=autoselect, characteristics=characteristics)
self.vttUrl = vtt_url
@classmethod
def parse_from_json_object(cls, json_object):
media = super().parse_from_json_object(json_object=json_object)
id_ = media.id
name = media.name
group_id = media.groupId
language = media.language
assoc_language = media.assocLanguage
is_default = media.isDefault
autoselect = media.autoselect
characteristics = media.characteristics
vtt_url = json_object.get('vttUrl')
vtt_media = VttMedia(id_=id_, name=name, group_id=group_id, language=language, assoc_language=assoc_language,
is_default=is_default, autoselect=autoselect, characteristics=characteristics,
vtt_url=vtt_url)
return vtt_media
|
[
"[email protected]"
] | |
3c32af0c8c3dd971d0aaa4bddbac2f32bc78ea47
|
93d361d1cfaf5065aada52ff53833b67302c2b1c
|
/project/urls.py
|
9cef54d5038b91c04d21c889fda0d9087dcbd3ed
|
[] |
no_license
|
append-knowledge/restapi-with-jwt-token
|
0fe573cd45633829645544447f66e6d6b43458ad
|
fbd276fb38cbd687253176b1dd96f07e16707dfd
|
refs/heads/master
| 2023-08-27T02:55:20.826945 | 2021-10-09T18:33:52 | 2021-10-09T18:33:52 | 415,391,422 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 507 |
py
|
from django.urls import path
from project import views
urlpatterns=[
path('accounts/signup',views.SignUpview.as_view(),name='signup'),
path('accounts/signin',views.SignInView.as_view(),name='signin'),
path('accounts/signout',views.SignOutView.as_view(),name='logout'),
path('accounts/home',views.HomeView.as_view(),name='home'),
path('accounts/change/<int:id>',views.ChangeDetailsView.as_view(),name='editdetails'),
path('accounts/remove/<int:id>',views.delete,name='removeitem')
]
|
[
"[email protected]"
] | |
94792a1bda13eac1d3f97a44481616c63e24d376
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_135/1642.py
|
c31871167442ed89cb0e8fb17031677d335e0e83
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,477 |
py
|
#-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Nishant
#
# Created: 12-04-2014
# Copyright: (c) Nishant 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
def main():
input_file = "E:\Dropbox\CodeBase\Python\GoogleCodeJam_2014\A-small-attempt0.in"
output_file = "E:\Dropbox\CodeBase\Python\GoogleCodeJam_2014\A-output.txt"
f = open(input_file, 'r')
o = open(output_file, 'w')
cases = int(f.readline())
lst = list(f)
i = 0
j = 1
while i < (cases * 10):
first = int(lst[i])
# print (first)
arr1 = [lst[i+1], lst[i+2], lst[i+3], lst[i+4]][first-1]
# print (arr1)
i += 5
sec = int(lst[i])
# print (sec)
arr2 = [lst[i+1], lst[i+2], lst[i+3], lst[i+4]][sec-1]
# print (arr2)
i += 5
set1 = set(arr1.split())
set2 = set(arr2.split())
# print (set1)
# print (set2)
res = set1 & set2
if len(res) == 0:
o.write ("Case #%s: Volunteer cheated!\n" %(j))
elif len(res) > 1:
o.write ("Case #%s: Bad magician!\n" %(j))
else:
o.write ("Case #%s: %s\n" %(j, next(iter(res))))
j += 1
f.close()
o.close()
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
b05399a6ff94cb5efa0799a162c6431e21c5440a
|
e68a40e90c782edae9d8f89b827038cdc69933c4
|
/res_bw/scripts/common/lib/plat-mac/carbon/carbonevt.py
|
a403e280d89920be14e1e2e9b2990efb37dd6195
|
[] |
no_license
|
webiumsk/WOT-0.9.16
|
2486f8b632206b992232b59d1a50c770c137ad7d
|
71813222818d33e73e414e66daa743bd7701492e
|
refs/heads/master
| 2021-01-10T23:12:33.539240 | 2016-10-11T21:00:57 | 2016-10-11T21:00:57 | 70,634,922 | 0 | 0 | null | null | null | null |
WINDOWS-1250
|
Python
| false | false | 372 |
py
|
# 2016.10.11 22:21:54 Střední Evropa (letní čas)
# Embedded file name: scripts/common/Lib/plat-mac/Carbon/CarbonEvt.py
from _CarbonEvt import *
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\plat-mac\carbon\carbonevt.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.10.11 22:21:54 Střední Evropa (letní čas)
|
[
"[email protected]"
] | |
7331db6bbc26b8c2088cca46dffdc7622db5ffc5
|
aa15002c5316b4c7e0a9563a40826057729e0b13
|
/tensorflow/python/keras/layers/preprocessing/table_utils.py
|
f5397da1f3eb482547e40b4ab293d3051753f429
|
[
"Apache-2.0"
] |
permissive
|
kkimdev/tensorflow
|
8238c5594ae44f084725ddf9b34d6d41645d4072
|
2fb75db6ad4f4a7f01ef4755b96b49f8eb6108db
|
refs/heads/master
| 2020-07-07T18:09:40.662883 | 2020-05-14T18:59:11 | 2020-05-14T19:05:05 | 203,429,154 | 0 | 0 |
Apache-2.0
| 2019-08-20T18:07:46 | 2019-08-20T18:07:46 | null |
UTF-8
|
Python
| false | false | 7,427 |
py
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for working with tf.lookup tables in Keras."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.keras import backend as K
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops.ragged import ragged_functional_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import gfile
class TableHandler(object):
  """Wrapper object that holds a lookup table and provides accessors.

  Supports dense `Tensor`, `SparseTensor` and `RaggedTensor` inputs, optional
  out-of-vocabulary (OOV) bucket replacement, and both V1 (session-based) and
  V2 (eager) execution modes.
  """

  def __init__(self, table, oov_tokens=None, use_v1_apis=False):
    """Creates a handler around `table`.

    Args:
      table: The lookup table to wrap. Must support `lookup`, `insert`,
        `remove`, `export` and `size`, and expose `_value_dtype` and
        `_default_value` (e.g. a mutable hash table).
      oov_tokens: Optional scalar or 1-D list/tuple/ndarray of values used as
        OOV buckets. If None, no OOV replacement is performed and missing keys
        simply return the table's default value.
      use_v1_apis: If True, tensors/ops are evaluated through the Keras V1
        session; otherwise eager execution (`.numpy()`) is assumed.
    """
    self.table = table
    self.use_v1_apis = use_v1_apis
    if oov_tokens is None:
      self.oov_tokens = oov_tokens
    else:
      # Allow a single scalar OOV token as a convenience.
      if not isinstance(oov_tokens, (list, tuple, np.ndarray)):
        oov_tokens = [oov_tokens]
      # Cast to the table's value dtype so equality tests and gathers line up.
      self.oov_tokens = math_ops.cast(oov_tokens, table._value_dtype)  # pylint: disable=protected-access

  def data(self):
    """Returns the table's (keys, values) as evaluated (host) arrays."""
    keys, values = self.table.export()
    return (self._eval(keys), self._eval(values))

  def vocab_size(self):
    """Returns the number of entries currently in the table."""
    return self._eval(self.table.size())

  def clear(self):
    """Removes every entry from the table."""
    keys, _ = self.table.export()
    self._run(self.table.remove(keys))

  def insert(self, keys, values):
    """Inserts `keys` -> `values` pairs into the table.

    Raises:
      RuntimeError: If `keys` and `values` have different lengths.
    """
    if len(values) != len(keys):
      raise RuntimeError("Size mismatch between values and key arrays. "
                         "Keys had size %s, values had size %s." %
                         (len(keys), len(values)))
    self._run(self.table.insert(keys, values))

  def _replace_oov_buckets(self, inputs, lookups):
    """Replace the default OOV value with one of the OOV bucket values."""
    if self.oov_tokens is None:
      return lookups
    num_oov_elements = self.oov_tokens.shape.num_elements()
    # Deterministically assign each input to a bucket: integer inputs by
    # modulo, string inputs by hashing.
    if inputs.dtype.is_integer:
      oov_indices = math_ops.floormod(inputs, num_oov_elements)
    else:
      oov_indices = string_ops.string_to_hash_bucket_fast(
          inputs, num_buckets=num_oov_elements)
    oov_values = array_ops.gather(self.oov_tokens, oov_indices)
    # Positions where the lookup returned the table's default value are
    # treated as OOV and replaced with their bucket value.
    oov_locations = math_ops.equal(lookups, self.table._default_value)  # pylint: disable=protected-access
    return array_ops.where(oov_locations, oov_values, lookups)

  def _ragged_lookup(self, inputs):
    """Perform a table lookup on a ragged tensor."""
    # The table lookup ops don't natively support ragged tensors, so if we have
    # a RT we need to use map_flat_values to look up every element.
    indexed_data = ragged_functional_ops.map_flat_values(
        self.table.lookup, inputs)
    indexed_data = ragged_functional_ops.map_flat_values(
        self._replace_oov_buckets, inputs, indexed_data)
    # Composite tensors can pass tensor values through, which will cause
    # errors if all operations in the TF graph do so. We can break this chain
    # with an identity here.
    return array_ops.identity(indexed_data)

  def _sparse_lookup(self, inputs):
    """Perform a table lookup on a sparse tensor."""
    values = self.table.lookup(inputs.values)
    values = self._replace_oov_buckets(inputs.values, values)
    indexed_data = sparse_tensor.SparseTensor(inputs.indices, values,
                                              inputs.dense_shape)
    # Composite tensors can pass tensor values through, which will cause
    # errors if all operations in the TF graph do so. We can break this chain
    # with an identity here.
    return array_ops.identity(indexed_data)

  def _tensor_lookup(self, inputs):
    """Perform a table lookup on a tf.tensor."""
    values = self.table.lookup(inputs)
    indexed_data = self._replace_oov_buckets(inputs, values)
    # (b/149446477): output does not preserve input shape.
    indexed_data.set_shape(inputs.shape)
    return indexed_data

  def lookup(self, inputs):
    """Perform a table lookup, dispatching on the input's tensor kind."""
    # Sparse tensors don't play nicely with tensor conversion, so we handle
    # them before attempting to convert lists or arrays to tensors.
    if isinstance(
        inputs, (sparse_tensor.SparseTensor, sparse_tensor.SparseTensorValue)):
      return self._sparse_lookup(inputs)
    # Try to convert lists/arrays to tensors or RaggedTensors.
    inputs = ragged_tensor.convert_to_tensor_or_ragged_tensor(inputs)
    # Run the lookup operation on the converted tensor.
    if ragged_tensor.is_ragged(inputs):
      return self._ragged_lookup(inputs)
    else:
      return self._tensor_lookup(inputs)

  def _eval(self, tensor):
    # Fetch a concrete value: session run in V1 mode, `.numpy()` in eager.
    if self.use_v1_apis:
      return K.get_session().run(tensor)
    else:
      return tensor.numpy()

  def _run(self, op):
    # In V1 mode ops must be explicitly run; in eager mode they already ran.
    if self.use_v1_apis:
      K.get_session().run(op)
def get_vocabulary_from_file(vocabulary_path, encoding="utf-8"):
  """Load vocabulary terms, one per line, from `vocabulary_path`.

  Lines are read through `gfile` so remote filesystems are supported. A line
  returned as bytes is decoded with `encoding` (undecodable bytes ignored);
  surrounding whitespace, including the trailing newline, is stripped.

  Args:
    vocabulary_path: Path to the vocabulary file.
    encoding: Text encoding used when a raw line is returned as bytes.

  Returns:
    A list of vocabulary tokens in file order.
  """
  tokens = []
  with gfile.GFile(vocabulary_path, "r") as f:
    # Pre-read then loop-on-value: an empty string signals end of file.
    line = f.readline()
    while line:
      if isinstance(line, str):
        term = line
      elif isinstance(line, bytes):
        term = line.decode(encoding, "ignore")
      tokens.append(term.strip())
      line = f.readline()
  return tokens
def validate_vocabulary_is_unique(vocabulary):
  """Validate that a vocabulary contains no repeated tokens.

  Args:
    vocabulary: An iterable of hashable tokens.

  Raises:
    ValueError: If any token appears more than once; the message lists the
      repeated tokens in first-occurrence order.
  """
  vocabulary_set = set(vocabulary)
  if len(vocabulary) != len(vocabulary_set):
    # Count occurrences with a plain dict: the original code referenced
    # `collections.Counter` without importing `collections`, which raised a
    # NameError exactly when a duplicate was present. A dict preserves
    # insertion order, so the report matches Counter's ordering.
    counts = {}
    for item in vocabulary:
      counts[item] = counts.get(item, 0) + 1
    repeated_items = [item for item, count in counts.items() if count > 1]
    raise ValueError("The passed vocabulary has at least one repeated "
                     "term. Please uniquify your dataset. The repeated terms "
                     "are %s" % repeated_items)
def assert_same_type(expected_type, values, value_name):
  """Assert that `values` has dtype `expected_type`.

  Args:
    expected_type: A dtype (or anything `dtypes.as_dtype` accepts).
    values: A tensor-like object exposing a `dtype` attribute.
    value_name: Human-readable name used in the error message.

  Raises:
    RuntimeError: If the dtypes do not match.
  """
  expected = dtypes.as_dtype(expected_type)
  actual = dtypes.as_dtype(values.dtype)
  if expected != actual:
    raise RuntimeError("Expected %s type %s, got %s" %
                       (value_name, expected_type, values.dtype))
def convert_to_ndarray(x, dtype=None):
  """Convert `x` to a numpy array, optionally casting to `dtype`.

  Lists and tuples are materialized via `np.array`; any other input is passed
  through unchanged (it is assumed to already behave like an ndarray).

  Args:
    x: A list, tuple, or array-like value.
    dtype: Optional target dtype. Ignored when None or string.

  Returns:
    The (possibly cast) array.
  """
  if isinstance(x, (list, tuple)):
    result = np.array(x)
  else:
    result = x
  if dtype not in (None, dtypes.string):
    # If the dtype is an integer, we do permissive casting. This allows
    # users to examine int32 data if the dtype is int64 without trouble.
    np_dtype = dtypes.as_dtype(dtype).as_numpy_dtype
    if np.can_cast(result.dtype, np_dtype):
      result = result.astype(np_dtype, casting="safe")
  return result
|
[
"[email protected]"
] | |
ae2f3de1b7eacdc7cfaca05fea27de5ee8f08410
|
da1d21bb8d0760bfba61cd5d9800400f928868aa
|
/misc/scripts/category_transformation_001.py
|
0226537d27ec40ac6726d5b97eb9d427f608ba0e
|
[] |
no_license
|
biznixcn/WR
|
28e6a5d10f53a0bfe70abc3a081c0bf5a5457596
|
5650fbe59f8dfef836503b8092080f06dd214c2c
|
refs/heads/master
| 2021-01-20T23:53:52.887225 | 2014-05-13T02:00:33 | 2014-05-13T02:00:33 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 724 |
py
|
# -*- coding: utf-8 -*-
from circuits.models import Circuit
from circuits.utils import CircuitCategory

"""
Transformaciones
Literary + Culture + Music + Art + Academic & Education => Arts & Culture
3         4         8       10    19                       4
Lifestyle + Green + Fashion + Design + Technology + Business + Geek + Spiritual + Entertainment => Lifestyle
18          7       6         11       16           14         17     21          25               18
"""

# One-off data migration collapsing several circuit categories into two.
# NOTE: the original script iterated `Circuits.objects`, but the imported
# model is `Circuit` — that raised a NameError before touching any row.

# Merge Literary (3), Music (8), Art (10), Academic & Education (19)
# into Arts & Culture (4). Category 4 already maps to itself.
for circuit in Circuit.objects.filter(category__in=[3, 8, 10, 19]):
    circuit.category = 4
    circuit.save()

# Merge Green (7), Fashion (6), Design (11), Technology (16), Business (14),
# Geek (17), Spiritual (21), Entertainment (25) into Lifestyle (18).
for circuit in Circuit.objects.filter(category__in=[7, 6, 11, 16, 14, 17, 21, 25]):
    circuit.category = 18
    circuit.save()
|
[
"[email protected]"
] | |
8c5db1946658ab443d7300038473c82702e1de90
|
04125b74273ad8b648343691565ab0cd6e25fa50
|
/image_comp_test.py
|
b32b911b4decb827e8360a480808f031846c8d3a
|
[] |
no_license
|
alpha0080/spineToolAdv
|
32918fa10b47ec9f19586b8878b243afd9dae945
|
c394e382502c11fb2b19f86f1e6352dee76444b5
|
refs/heads/master
| 2021-07-25T09:10:38.883564 | 2018-12-11T00:56:12 | 2018-12-11T00:56:12 | 142,319,584 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 954 |
py
|
# Collect per-frame image metadata for building a spritesheet.
# ref: https://minzkraut.com/2016/11/23/making-a-simple-spritesheet-generator-in-python/
import os
import sys  # was missing: the sys.path tweak below silently failed with NameError

# Make the RenderMan "ice" image library importable. The original path
# contained the typo "Libs/ite-packages"; the correct suffix (matching the
# commented RMS-21.7 line below) is "Lib/site-packages".
try:
    sys.path.append("C:/Program Files/Pixar/RenderManProServer-22.1/lib/python2.7/Lib/site-packages")
    # sys.path.append("C:/Program Files/Pixar/RenderManProServer-21.7/lib/python2.7/Lib/site-packages")
    import ice
except ImportError:
    # Fall through: the unconditional import below reports a clear error if
    # ice is genuinely unavailable.
    pass
import ice

# Spritesheet layout scaffolding (referenced by the commented-out code below).
max_frames_row = 10.0
frames = []
tile_width = 0
tile_height = 0
spritesheet_width = 0
spritesheet_height = 0

folder = "C:/Temp/testImage/1"
files = os.listdir(folder)
files.sort()

for name in files:
    filename = folder + '/' + name
    image = ice.Load(filename)
    imageMetaData = image.GetMetaData()
    frames.append(imageMetaData)

print(frames)  # py3 print function (was a Python-2 print statement)
# imageSize = imageMetaData['Original Size']
# imageWidth = int(imageMetaData['Original Size'].split(" ")[0].split("(")[1])
# imageHeight = int(imageMetaData['Original Size'].split(" ")[1].split(")")[0])
|
[
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.