Dataset schema (column: type, observed range/values):
blob_id: stringlengths 40 to 40
directory_id: stringlengths 40 to 40
path: stringlengths 3 to 288
content_id: stringlengths 40 to 40
detected_licenses: listlengths 0 to 112
license_type: stringclasses, 2 values
repo_name: stringlengths 5 to 115
snapshot_id: stringlengths 40 to 40
revision_id: stringlengths 40 to 40
branch_name: stringclasses, 684 values
visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
github_id: int64, 4.92k to 681M (nullable)
star_events_count: int64, 0 to 209k
fork_events_count: int64, 0 to 110k
gha_license_id: stringclasses, 22 values
gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50 (nullable)
gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19 (nullable)
gha_language: stringclasses, 147 values
src_encoding: stringclasses, 25 values
language: stringclasses, 1 value
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 128 to 12.7k
extension: stringclasses, 142 values
content: stringlengths 128 to 8.19k
authors: listlengths 1 to 1
author_id: stringlengths 1 to 132
Rows follow in this column order, with "|" separating field values.
a3c3350d331393ea83eede2ee19ca9d984c1bfd0
|
589385876626ffdf0e3a960a191eed9b90fa6eb2
|
/trunk/aztk/web/user_homepage.py
|
0caf42cd3ffc7a4a1681b32a784c6440e5ed2011
|
[
"BSD-3-Clause"
] |
permissive
|
BGCX261/zoto-server-svn-to-git
|
bb7545852bd52d6626f3b2b9c0b1d5834eb08201
|
73abf60264ae5a6b610d19e25be833f0754b160e
|
refs/heads/master
| 2021-01-21T23:04:02.366636 | 2015-08-25T15:16:12 | 2015-08-25T15:16:12 | 41,602,694 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,309 |
py
|
"""
dyn_pages/user_homepage.py
Author: Trey Stout
Date Added: ?
User homepage. Also the starting point for all user interior pages.
"""
## STD LIBS
## OUR LIBS
from zoto_base_page import zoto_base_page
from dyn_image_handler import dyn_image_handler
from user_albums import user_albums
from user_galleries import user_galleries
from user_publish import user_publish
from feeds import feeds
from other_sizes import other_sizes
## 3RD PARTY LIBS
from twisted.web.util import redirectTo
from twisted.internet.defer import Deferred
from nevow import loaders, inevow, tags as T
class user_homepage(zoto_base_page):
local_js_includes = [
"countries.js",
"static_news.js",
"select_box.lib.js",
"pagination.lib.js",
"e_paper.lib.js",
"globber.lib.js",
"tag_cloud.lib.js",
"comments.lib.js",
"albums.lib.js",
"featured_media.lib.js",
"widget.lib.js",
"image_detail.lib.js",
"table.lib.js",
"lookahead.lib.js",
"detect.lib.js",
"third_party/swfobject.js",
"messages.lib.js"
]
page_manager_js = "managers/user_homepage.js"
def __init__(self, username):
zoto_base_page.__init__(self)
self.username = username.lower()
def _get_browse_username(self, ctx):
return self.username
def render_my_photo_link(self, ctx, data):
return '/%s/photos/' % self.username
# def get_avatar_permission():
# def handle_info(perm_info):
# if perm_info.get('view_flag', 3):
#
# d = self.app.api.permissions.get_image_permissions(self.username, user_info['avatar_id'])
# d.addCallback(handle_info)
def avatar_handler(self, ctx, size):
request = inevow.IRequest(ctx)
color_option = self._get_color_option(ctx)
bg, fg = color_option.split("_")
def handle_avatar_display(result):
if not result['can_view']:
# generic avatar
return redirectTo('/image/avatar-%d.jpg' % size, request)
else:
# browser has permission to view avatar, so show it
new_segments = [str(size), self.avatar_id]
handler = dyn_image_handler(self.username, self.app, self.log)
handler.set_segments(new_segments)
return handler
def get_auth_username(self):
d2 = Deferred()
auth_hash = request.getCookie('auth_hash')
if auth_hash:
self.auth_username = auth_hash.split(':')[0].lower()
else:
self.auth_username = ""
d2.callback(0)
return d2
# look up id
def handle_info(result):
if result[0] != 0:
return redirectTo('/image/avatar-%d.jpg' % size, request)
user_info = result[1]
# Does the user have an avatar selected
if user_info.get('avatar_id', None):
self.avatar_id = user_info['avatar_id']
# then check if username can view it
d3 = self.app.db.query("""
SELECT zoto_user_can_view_media(
zoto_get_user_id(%s),
zoto_get_image_id(zoto_get_user_id(%s), %s),
zoto_get_user_id(%s)
) AS can_view
""", (self.username, self.username, user_info['avatar_id'], self.auth_username), single_row=True)
d3.addCallback(handle_avatar_display)
return d3
else:
# generic avatar
return redirectTo('/image/bg_%s/%s/avatar-%d.jpg' % (bg, fg, size), request)
def get_user_info(result):
if result[0] != 0:
return redirectTo('/image/bg_%s/%s/avatar-%d.jpg' % (bg, fg, size), request)
return self.app.api.users.get_info(result[1], result[1])
d = get_auth_username(self)
d.addCallback(lambda _: self.app.api.users.get_user_id(self.username))
d.addCallback(get_user_info)
d.addCallback(handle_info)
return d
def child_img(self, ctx):
return dyn_image_handler(self.username, self.app, self.log)
def child_feeds(self, ctx):
return feeds(self.username, self.app, self.log)
def child_albums(self, ctx):
return user_albums(self.username)
def child_galleries(self, ctx):
return user_galleries(self.username)
def child_publish(self, ctx):
return user_publish(self.username)
def child_avatar_small(self, ctx):
return self.avatar_handler(ctx, 11)
def child_avatar_large(self, ctx):
return self.avatar_handler(ctx, 18)
def child_other_sizes(self, ctx):
return other_sizes(self.username)
def childFactory(self, ctx, name):
if name == "":
return self
setattr(user_homepage, "child_avatar.jpg", user_homepage.child_avatar_large)
setattr(user_homepage, "child_avatar-small.jpg", user_homepage.child_avatar_small)
|
[
"[email protected]"
] | |
7e19b1f65c28d7e8d33d9f9df1406f25cab5200c
|
fb652a77dd6dba0c971ac052271e1e03ff7c0d6e
|
/settings/migrations/0008_auto_20181113_0656.py
|
9d9fe0d0ca6b8a4d8782e2b78f2ff35543fb2503
|
[] |
no_license
|
ryosuwito/mt-commerce
|
5fa8419650d8c089bc8baf75322389141b4522af
|
90359dada36ab903dbf30f3ab6616a3c4ed3f655
|
refs/heads/master
| 2022-12-11T05:22:31.037960 | 2019-03-07T10:59:58 | 2019-03-07T10:59:58 | 169,371,940 | 0 | 0 | null | 2022-12-08T01:21:07 | 2019-02-06T07:56:27 |
HTML
|
UTF-8
|
Python
| false | false | 592 |
py
|
# Generated by Django 2.0.8 on 2018-11-13 06:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('settings', '0007_auto_20181113_0655'),
]
operations = [
migrations.AlterField(
model_name='footerlink',
name='addr',
field=models.CharField(blank=True, max_length=400, null=True),
),
migrations.AlterField(
model_name='headerlink',
name='addr',
field=models.CharField(blank=True, max_length=400, null=True),
),
]
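# Editor's addition, not part of the original migration: a hedged sketch of the
# field this AlterField operation leaves on the model. The real FooterLink and
# HeaderLink model definitions live elsewhere in the project and may differ in
# other attributes.
#
# class FooterLink(models.Model):
#     addr = models.CharField(blank=True, max_length=400, null=True)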
|
[
"[email protected]"
] | |
1055e5be5726757fa80bb3ebe93883dab06b38ae
|
f8ca9769a359795dc649c46e299cd9b3bfca1864
|
/forum/spiders/cancer_cancerforums_spider.py
|
f3a2c67fe128c84d117d7c3248a56d9a73bcdde2
|
[] |
no_license
|
florencefantine/ehealth_scraper
|
d4093a67543f653de200e6610eaaf65842aa322f
|
886ed12d0a605584796dea11b532883c1e86d09a
|
refs/heads/master
| 2021-01-10T11:39:58.270130 | 2015-11-24T22:09:34 | 2015-11-24T22:09:34 | 45,852,359 | 1 | 6 | null | 2015-11-24T20:52:28 | 2015-11-09T16:51:54 |
Python
|
UTF-8
|
Python
| false | false | 3,237 |
py
|
# -*- coding: utf-8 -*-
import scrapy
import hashlib
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors import LinkExtractor
from scrapy.selector import Selector
from forum.items import PostItemsList
import re
from bs4 import BeautifulSoup
import logging
import string
import dateparser
import time
# Spider for crawling cancer support threads on www.cancerforums.net
class ForumsSpider(CrawlSpider):
name = "cancer_cancerforums_spider"
allowed_domains = ["www.cancerforums.net"]
start_urls = [
"http://www.cancerforums.net/forums/13-Lung-Cancer-Forum",
"http://www.cancerforums.net/forums/14-Prostate-Cancer-Forum"
]
rules = (
# Rule to follow links to individual forum threads and run the parsing function
# (restricted to the thread-title anchors selected below)
Rule(LinkExtractor(
restrict_xpaths='//h3/a[@class="title"]',
), callback='parsePostsList'),
# Rule to follow the "next page" arrow through the thread listing
Rule(LinkExtractor(
restrict_xpaths='//span[@class="prev_next"]/a[@rel="next"]'
), follow=True),
)
def cleanText(self, str):
soup = BeautifulSoup(str, 'html.parser')
return re.sub(" +|\n|\r|\t|\0|\x0b|\xa0",' ',soup.get_text()).strip()
def getDate(self,date_str):
# date_str="Fri Feb 12, 2010 1:54 pm"
try:
date = dateparser.parse(date_str)
epoch = int(date.strftime('%s'))
create_date = time.strftime("%Y-%m-%dT%H:%M:%S%z", time.gmtime(epoch))  # ISO-8601-style UTC timestamp
return create_date
except Exception:
#logging.error(">>>>>"+date_str)
return date_str
# https://github.com/scrapy/dirbot/blob/master/dirbot/spiders/dmoz.py
# https://github.com/scrapy/dirbot/blob/master/dirbot/pipelines.py
def parsePostsList(self,response):
sel = Selector(response)
posts = sel.xpath('//ol[@class="posts"]/li[@class="postbitlegacy postbitim postcontainer old"]')
condition = "cancer"
items = []
topic = response.xpath('//h1/span[@class="threadtitle"]/a/text()').extract_first()
url = response.url
for post in posts:
item = PostItemsList()
item['author'] = post.xpath('.//div[@class="popupmenu memberaction"]/a/strong/text()').extract_first()
item['author_link'] = post.xpath('.//div[@class="popupmenu memberaction"]/a/@href').extract_first()
item['condition'] = condition
item['create_date'] = self.getDate(post.xpath('.//span[@class="date"]/text()').extract_first().replace(',','').strip())
item['domain'] = "".join(self.allowed_domains)
item['post'] = re.sub(r'\s+',' ',self.cleanText(" ".join(post.xpath('.//div[@class="content"]//blockquote/text()').extract())))
# item['tag']=''
item['topic'] = topic
item['url']=url
items.append(item)
return items
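# Editor's addition, not part of the original file: a minimal usage sketch.
# Spiders like this one are normally started from the Scrapy project root with
# the CLI; the output file name here is only an example:
#   scrapy crawl cancer_cancerforums_spider -o posts.json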
|
[
"[email protected]"
] | |
9618d0ea8bcd0e39563734aa75c10cfbf72a6dde
|
bbd69601912a3361d788efd03a47f9d4e3bac09e
|
/unittests/test_propgriddefs.py
|
ba6c67748213bd2fab4de8c1bcd982fff4d23034
|
[] |
no_license
|
wxWidgets/Phoenix
|
56929484460a0399a8f1d9582bc77c20aa14748d
|
a1184286703cf24c4b88e5bc14cf2979c1b1ea00
|
refs/heads/master
| 2023-09-01T07:10:17.437093 | 2023-08-31T05:38:01 | 2023-08-31T05:38:01 | 5,078,061 | 2,268 | 677 | null | 2023-09-09T17:06:59 | 2012-07-17T06:22:25 |
Python
|
UTF-8
|
Python
| false | false | 1,278 |
py
|
import unittest
from unittests import wtc
import wx
import wx.propgrid as pg
#---------------------------------------------------------------------------
class propgriddefs_Tests(wtc.WidgetTestCase):
def test_propgriddefs1(self):
pg.PG_INVALID_VALUE
pg.PG_DONT_RECURSE
pg.PG_BASE_OCT
pg.PG_BASE_DEC
pg.PG_BASE_HEX
pg.PG_BASE_HEXL
pg.PG_PREFIX_NONE
pg.PG_PREFIX_0x
pg.PG_PREFIX_DOLLAR_SIGN
pg.PG_KEEP_STRUCTURE
pg.PG_RECURSE
pg.PG_INC_ATTRIBUTES
pg.PG_RECURSE_STARTS
pg.PG_FORCE
pg.PG_SORT_TOP_LEVEL_ONLY
pg.PG_FULL_VALUE
pg.PG_REPORT_ERROR
pg.PG_PROPERTY_SPECIFIC
pg.PG_EDITABLE_VALUE
pg.PG_COMPOSITE_FRAGMENT
pg.PG_UNEDITABLE_COMPOSITE_FRAGMENT
pg.PG_VALUE_IS_CURRENT
pg.PG_PROGRAMMATIC_VALUE
pg.PG_SETVAL_REFRESH_EDITOR
pg.PG_SETVAL_AGGREGATED
pg.PG_SETVAL_FROM_PARENT
pg.PG_SETVAL_BY_USER
pg.PG_LABEL
pg.PG_LABEL_STRING
pg.PG_NULL_BITMAP
pg.PG_COLOUR_BLACK
pg.PG_DEFAULT_IMAGE_SIZE
#---------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
6038afd572bebeb6555821f05e0710b04f59d809
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_3/wllgar015/question3.py
|
73caabd33916af860b5d886602e949a64214f59d
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,807 |
py
|
#question 3
message=input("Enter the message:\n")
repeat=eval(input("Enter the message repeat count:\n"))
frame=eval(input("Enter the frame thickness:\n"))
def awesome(message, repeat, frame):
#Frame 1 is two characters greater than the input string
#Printing the string messages would work quite well.
#The repeat is how many lines of the top frame you will print.
#For example, repeat = 2, you will need to print 2 lines worth of frame
#at the top and two at the bottom. The 2 also reflects the lines you would need
#at the sides of the message itself.
if frame>0:
print("+","-"*(len(message)+2*frame),"+",sep="")
#counters help a lot ;)
count=1
dcount=1
#first loop for top frame
for i in range(frame-1): #it is frame-1 because we have printed one of the frames already. range(frame) will result in an extra unneccessary line.
print("|"*(count),"+",(len(message)+2*frame-2*dcount)*"-","+","|"*(count),sep="")
count+=1
dcount+=1
#second loop for message
for i in range(repeat):
print("|"*frame,message,"|"*frame)
#third loop for bottom frame which is the inverse of the top loop
count=frame-1
dcount=frame-1
#first loop for top frame
for i in range(frame-1): #it is frame-1 because we have printed one of the frames already. range(frame) will result in an extra unneccessary line.
print("|"*(count),"+",(len(message)+2*frame-2*dcount)*"-","+","|"*(count),sep="")
count-=1
dcount-=1
if frame>0:
print("+","-"*(len(message)+2*frame),"+",sep="")
awesome(message, repeat, frame)
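# Editor's addition, not part of the original file: a worked example of the output.
# Calling the function with message="hi", repeat=1, frame=2 prints:
# +------+
# |+----+|
# || hi ||
# |+----+|
# +------+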
|
[
"[email protected]"
] | |
dc11c2a9a91ce330d48bdf58adb1905e8abc7e5f
|
219b7903ad9b16acb4790f561952021e60f23abe
|
/giftexchange/admin.py
|
671384fdd7635bf9f0761dcda08bdcce3a118864
|
[] |
no_license
|
astromitts/gifterator3000
|
40663de82526ef874c05d9385f53e6c2e3cb1625
|
64a8c420eb7b729c96861aa430f7f15cbe499d3d
|
refs/heads/master
| 2023-01-28T06:47:09.110323 | 2020-12-04T01:43:53 | 2020-12-04T01:43:53 | 293,815,931 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 608 |
py
|
from django.contrib import admin
from giftexchange.models import (
GiftExchange,
Participant,
AppUser,
ExchangeAssignment,
AppInvitation,
MagicLink
)
class ParticipantInline(admin.TabularInline):
model = Participant
fields = ['appuser']
class GiftExchangeAdmin(admin.ModelAdmin):
inlines = [
ParticipantInline,
]
list_display = ['title', 'date']
admin.site.register(AppUser)
admin.site.register(GiftExchange, GiftExchangeAdmin)
admin.site.register(Participant)
admin.site.register(ExchangeAssignment)
admin.site.register(AppInvitation)
admin.site.register(MagicLink)
|
[
"[email protected]"
] | |
ed6deb8cf63883ec1dce2f0f037fd6877ffbaea1
|
c8c9278ffb74da44789b310540693c66468b998c
|
/shop/urls.py
|
87f494b760e5ec8a18cf0fe7c69c724930fc0633
|
[] |
no_license
|
danielspring-crypto/ekit
|
bdfcec4b2ce8e36bb9e692f7a825bfce0bbf4166
|
560f6fa5522e1cb4a2cf30325b2b1b07beceea3d
|
refs/heads/main
| 2023-01-28T00:20:22.058791 | 2020-12-07T08:33:06 | 2020-12-07T08:33:06 | 319,253,641 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 301 |
py
|
from django.urls import path
from . import views
app_name = 'shop'
urlpatterns = [
path('', views.product_list, name='product_list'),
path('<slug:category_slug>/', views.product_list, name='product_list_by_category'),
path('<int:id>/<slug:slug>/', views.product_detail, name='product_detail'),
]
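# Editor's addition, not part of the original file: with app_name = 'shop' these
# routes can be reversed by their namespaced names, assuming this URLconf is
# included at the site root; the id and slug values are only illustrative:
#   from django.urls import reverse
#   reverse('shop:product_detail', args=[1, 'some-slug'])   # -> '/1/some-slug/'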
|
[
"[email protected]"
] | |
1f44b4b6d3e46f04b442fb65029b4ba093647a51
|
9ae936a9689832a5b22cd12998c5dc5047aee164
|
/December_2020/December_2020/5_dec_2020/test.py
|
b410a47a0763433f3d18e060eca479c4c3ca3919
|
[] |
no_license
|
inderdevkumar/2020-Python-Projects
|
218320335f352dc340877d1ef62b65605ce4ccfd
|
210154d092021d8de5f30797af9ad8e193e3c68e
|
refs/heads/master
| 2023-02-04T08:47:05.952342 | 2020-12-25T15:33:27 | 2020-12-25T15:33:27 | 322,878,822 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,596 |
py
|
#===================== Function Defination to check prime numbers=================================
from itertools import count, islice
primes = (n for n in count(2) if all(n % d for d in range(2, n)))
print("100th prime is %d" % next(islice(primes, 99, 100)))
#===================== Function Defination to Euler of n =================================
def pi_euler1(n):
count = 0
potentialprime = 3
prime_lists= [] #To store list of prime numbers
deno_list= [] #To store list of denominators which are closed to numerator
product= 1
while count < n: #collect the first n primes
if primetest(potentialprime) == True:
prime_lists.append(potentialprime) #Appending prime_lists
count += 1
potentialprime += 1
else:
potentialprime += 1
for value in prime_lists:
denominator_list= [i*4 for i in range(1,n)]
denominator= denominator_list[min(range(len(denominator_list)), key = lambda i: abs(denominator_list[i]-value))] #Finding the denominator which is closed to numerator
deno_list.append(denominator) #Appending deno_list
product= product*(value/denominator) #Finding product of expression
print("Prime Lists are: ", prime_lists)
print("Denominator Lists are: ", deno_list)
print(f"pi euler1 for {n} is: ", product*4) #To get the desired output. This calculation is performed
if __name__ == "__main__":
user_input_number= int(input("Enter the number of terms: "))
#pi_euler1(user_input_number)
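# Editor's addition, not part of the original file: pi_euler1() calls primetest(),
# which is not defined in this snippet. A minimal trial-division sketch that
# matches how it is used above (an assumption, not the author's code):
# def primetest(number):
#     return number > 1 and all(number % d for d in range(2, int(number ** 0.5) + 1))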
|
[
"[email protected]"
] | |
3ba42d75b8d7773ba4b0b673f1dbbbdaf2f8c9ec
|
4a41223e8c8ab33d83c6f213692c6097bb96540d
|
/eelbrain/_stats/permutation.py
|
b4e3ec6a2a339c793de3982c33ed7a8d87dbda5e
|
[
"BSD-3-Clause"
] |
permissive
|
rbaehr/Eelbrain
|
33ceeee24533581ab3e7569c31e0f6a6c6dfcda1
|
6301dc256e351fdbb58bbe13ab48fde7bfcf192a
|
refs/heads/master
| 2021-07-05T19:19:20.573231 | 2017-10-03T04:35:23 | 2017-10-03T04:35:23 | 104,907,464 | 0 | 0 | null | 2017-09-26T16:03:20 | 2017-09-26T16:03:20 | null |
UTF-8
|
Python
| false | false | 6,505 |
py
|
# Author: Christian Brodbeck <[email protected]>
from itertools import izip
from math import ceil
import random
import numpy as np
from .._data_obj import NDVar, Var
from .._utils import intervals
_YIELD_ORIGINAL = 0
# for testing purposes, yield original order instead of permutations
def _resample_params(N, samples):
"""Decide whether to do permutations or random resampling
Parameters
----------
N : int
Number of observations.
samples : int
``samples`` parameter (number of resampling iterations, or < 0 to
sample all permutations).
Returns
-------
actual_n_samples : int
Adapted number of resamplings that will be done.
samples_param : int
Samples parameter for the resample function (-1 to do all permutations,
otherwise same as n_samples).
"""
n_perm = 2 ** N
if n_perm - 1 <= samples:
samples = -1
if samples < 0:
n_samples = n_perm - 1
else:
n_samples = samples
return n_samples, samples
def permute_order(n, samples=10000, replacement=False, unit=None, seed=0):
"""Generator function to create indices to shuffle n items
Parameters
----------
n : int
Number of cases.
samples : int
Number of samples to yield. If < 0, all possible permutations are
performed.
replacement : bool
whether random samples should be drawn with replacement or without.
unit : categorial
Factor specifying unit of measurement (e.g. subject). If unit is
specified, resampling proceeds by first resampling the categories of
unit (with or without replacement) and then shuffling the values
within units (no replacement).
seed : None | int
Seed the random state of the relevant randomization module
(:mod:`random` or :mod:`numpy.random`) to make replication possible.
None to skip seeding (default 0).
Returns
-------
Iterator over index.
"""
n = int(n)
samples = int(samples)
if samples < 0:
err = "Complete permutation for resampling through reordering"
raise NotImplementedError(err)
if _YIELD_ORIGINAL:
original = np.arange(n)
for _ in xrange(samples):
yield original
return
if seed is not None:
np.random.seed(seed)
if unit is None:
if replacement:
for _ in xrange(samples):
yield np.random.randint(0, n, n)  # n indices drawn with replacement
else:
index = np.arange(n)
for _ in xrange(samples):
np.random.shuffle(index)
yield index
else:
if replacement:
raise NotImplementedError("Replacement and units")
else:
idx_orig = np.arange(n)
idx_perm = np.arange(n)
unit_idxs = [np.nonzero(unit == cell)[0] for cell in unit.cells]
for _ in xrange(samples):
for idx_ in unit_idxs:
v = idx_orig[idx_]
np.random.shuffle(v)
idx_perm[idx_] = v
yield idx_perm
def permute_sign_flip(n, samples=10000, seed=0, out=None):
"""Iterate over indices for ``samples`` permutations of the data
Parameters
----------
n : int
Number of cases.
samples : int
Number of samples to yield. If < 0, all possible permutations are
performed.
seed : None | int
Seed the random state of the :mod:`random` module to make replication
possible. None to skip seeding (default 0).
out : array of int8 (n,)
Buffer for the ``sign`` variable that is yielded in each iteration.
Yields
------
sign : array of int8 (n,)
Sign for each case (``1`` or ``-1``; ``sign`` is the same array object
but its content modified in every iteration).
"""
n = int(n)
if seed is not None:
random.seed(seed)
if out is None:
out = np.empty(n, np.int8)
else:
assert out.shape == (n,)
if n > 62: # Python 2 limit for xrange
if samples < 0:
raise NotImplementedError("All possibilities for more than 62 cases")
n_groups = ceil(n / 62.)
group_size = int(ceil(n / n_groups))
for _ in izip(*(permute_sign_flip(stop - start, samples, None,
out[start: stop]) for
start, stop in intervals(range(0, n, group_size) + [n]))):
yield out
return
# determine possible number of permutations
n_perm_possible = 2 ** n
if samples < 0:
# do all permutations
sample_sequences = xrange(1, n_perm_possible)
else:
# random resampling
sample_sequences = random.sample(xrange(1, n_perm_possible), samples)
for seq in sample_sequences:
out.fill(1)
for i in (i for i, s in enumerate(bin(seq)[-1:1:-1]) if s == '1'):
out[i] = -1
yield out
def resample(Y, samples=10000, replacement=False, unit=None, seed=0):
"""
Generator function to resample a dependent variable (Y) multiple times
Parameters
----------
Y : Var | NDVar
Variable which is to be resampled.
samples : int
Number of samples to yield. If < 0, all possible permutations are
performed.
replacement : bool
whether random samples should be drawn with replacement or without.
unit : categorial
Factor specifying unit of measurement (e.g. subject). If unit is
specified, resampling proceeds by first resampling the categories of
unit (with or without replacement) and then shuffling the values
within units (no replacement).
seed : None | int
Seed the random state of the relevant randomization module
(:mod:`random` or :mod:`numpy.random`) to make replication possible.
None to skip seeding (default 0).
Returns
-------
Iterator over Y_resampled. One copy of ``Y`` is made, and this copy is
yielded in each iteration with shuffled data.
"""
if isinstance(Y, Var):
pass
elif isinstance(Y, NDVar):
if not Y.has_case:
raise ValueError("Need NDVar with cases")
else:
raise TypeError("Need Var or NDVar")
out = Y.copy('{name}_resampled')
for index in permute_order(len(out), samples, replacement, unit, seed):
out.x[index] = Y.x
yield out
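# Editor's addition, not part of the original file: a minimal usage sketch,
# assuming the module above is importable; each iteration yields one shuffled
# index array.
#   for index in permute_order(5, samples=3, seed=0):
#       print(index)   # a permutation of 0..4, different on each iteration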
|
[
"[email protected]"
] | |
01ebc7eb291e960ec42d8dc23255d61ec4d1af5c
|
71a28d4bc1c0f32dc9185332ba2142ba823d3e53
|
/core/urls.py
|
b9dba265ed605e2da34c2ae8c0cdaf56b88c141d
|
[] |
no_license
|
ekeydar/train_stops_map
|
51b1e3a86967851ea16f2e822867f881b91d24fe
|
acbc1a1a250ca6c3e7f5dde8932301bd4b67c96d
|
refs/heads/master
| 2016-09-12T21:17:13.771035 | 2016-05-27T12:54:46 | 2016-05-27T12:54:46 | 59,438,833 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 209 |
py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^stops/$', views.StopList.as_view(), name='stops'),
url(r'^stops/(?P<pk>\d+)/$', views.StopDetail.as_view(), name='stop'),
]
|
[
"[email protected]"
] | |
ec6e6ad22ab5a96295739ceb492638e945ef5cdd
|
2734b77a68f6d7e22e8b823418ad1c59fe1a34af
|
/opengever/dossier/upgrades/to4303.py
|
0a0901f85458945e5a68304d5dff1c762b0b9236
|
[] |
no_license
|
4teamwork/opengever.core
|
5963660f5f131bc12fd0a5898f1d7c8f24a5e2b1
|
a01bec6c00d203c21a1b0449f8d489d0033c02b7
|
refs/heads/master
| 2023-08-30T23:11:27.914905 | 2023-08-25T14:27:15 | 2023-08-25T14:27:15 | 9,788,097 | 19 | 8 | null | 2023-09-14T13:28:56 | 2013-05-01T08:28:16 |
Python
|
UTF-8
|
Python
| false | false | 964 |
py
|
from ftw.upgrade import ProgressLogger
from ftw.upgrade import UpgradeStep
from opengever.dossier.templatedossier import TemplateDossier
from plone import api
from zope.event import notify
from zope.lifecycleevent import ObjectModifiedEvent
class MigrateTemplateDossierClass(UpgradeStep):
def __call__(self):
self.setup_install_profile('profile-opengever.dossier.upgrades:4303')
self.migrate_template_dossiers()
def migrate_template_dossiers(self):
catalog = api.portal.get_tool('portal_catalog')
brains = catalog.unrestrictedSearchResults(
portal_type='opengever.dossier.templatedossier')
with ProgressLogger('Migrating templatedossier class', brains) as step:
for brain in brains:
self.migrate_object(brain.getObject())
step()
def migrate_object(self, obj):
self.migrate_class(obj, TemplateDossier)
notify(ObjectModifiedEvent(obj))
|
[
"[email protected]"
] | |
f898bc011b7d9345fbef96b0f970ceb599a3409a
|
6f1d57238f3b395b04696a16768bcc507f00630c
|
/A_Comparing_Strings.py
|
283a2e89d3cfcd5b5fe35998a4154d040395da59
|
[] |
no_license
|
FazleRabbbiferdaus172/Codeforces_Atcoder_Lightoj_Spoj
|
024a4a2a627de02e4698709d6ab86179b8301287
|
6465e693337777e7bd78ef473b4d270ce757a3a2
|
refs/heads/master
| 2023-07-01T06:32:14.775294 | 2021-07-27T17:07:37 | 2021-07-27T17:07:37 | 271,202,781 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 396 |
py
|
import sys
s1 = list(input())
s2 = list(input())
d1, d2 = dict(), dict()
miss = 0
if len(s1) != len(s2):
print("NO")
sys.exit(0)
for i in range(97, 97+26):
d1[chr(i)] = 0
d2[chr(i)] = 0
for i in range(len(s1)):
d1[s1[i]] += 1
d2[s2[i]] += 1
if s1[i] != s2[i]:
miss += 1
if d1 == d2 and miss == 2:
print("YES")
else:
print("NO")
|
[
"[email protected]"
] | |
213d25bf84577a6d3302247cb04c2a0af37c66c0
|
1abd2d4fe2f01584bf0aab44d7e98e76f7280f9f
|
/setup.py
|
5f1ae46c490365023591c75ca903926ea2fd28c3
|
[] |
no_license
|
yychuang/GenIce
|
b370c046cb4eec134ab80f7faa36aeb00f52786e
|
80b836df7208be3d830bd276924a0a91635eded7
|
refs/heads/main
| 2023-06-06T18:24:23.242385 | 2021-06-28T08:57:25 | 2021-06-28T08:57:25 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,652 |
py
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
import os
import codecs
import re
# Copied from wheel package
here = os.path.abspath(os.path.dirname(__file__))
#README = codecs.open(os.path.join(here, 'README.txt'), encoding='utf8').read()
#CHANGES = codecs.open(os.path.join(here, 'CHANGES.txt'), encoding='utf8').read()
with codecs.open(os.path.join(os.path.dirname(__file__), 'genice2', '__init__.py'),
encoding='utf8') as version_file:
metadata = dict(
re.findall(
r"""__([a-z]+)__ = "([^"]+)""",
version_file.read()))
long_desc = "".join(open("README.md").readlines())
with open("requirements.txt") as f:
requires = [x.strip() for x in f.readlines()]
setup(name='GenIce2',
python_requires='>=3.6',
version=metadata['version'],
description='A Swiss army knife to generate hydrogen-disordered ice structures.',
long_description=long_desc,
long_description_content_type="text/markdown",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
],
author='Masakazu Matsumoto',
author_email='[email protected]',
url='https://github.com/vitroid/GenIce/',
keywords=['genice2', ],
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': [
'genice2 = genice2.cli.genice:main',
'analice2 = genice2.cli.analice:main'
]
}
)
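# Editor's addition, not part of the original file: the regex above harvests
# dunder metadata from genice2/__init__.py, e.g. a line such as
#   __version__ = "2.1"
# ends up as metadata['version'] == "2.1" (the version string is illustrative).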
|
[
"[email protected]"
] | |
83e984bf7313b99dd2e24c39e24640b35d45e344
|
a2706c66c4f2769c00fc5f67e1a85742cfa7e17c
|
/WebSocket/Handle/console.py
|
71d912effb8fa4a6b590eaf8e05ac2ba4968e4fc
|
[
"BSD-3-Clause"
] |
permissive
|
Jeromeyoung/viperpython
|
48800312dcbdde17462d28d45865fbe71febfb11
|
ba794ee74079285be32191e898daa3e56305c8be
|
refs/heads/main
| 2023-09-01T18:59:23.464817 | 2021-09-26T04:05:36 | 2021-09-26T04:05:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,131 |
py
|
# -*- coding: utf-8 -*-
# @File : console.py
# @Date : 2021/2/26
# @Desc :
from Lib.configs import RPC_FRAMEWORK_API_REQ
from Lib.log import logger
from Lib.method import Method
from Lib.rpcclient import RpcClient
from Lib.xcache import Xcache
class Console(object):
def __init__(self):
pass
@staticmethod
def get_active_console():
result = RpcClient.call(Method.ConsoleList, [], timeout=RPC_FRAMEWORK_API_REQ)
if result is None:
Xcache.set_console_id(None)
return False
else:
consoles = result.get("consoles")
if len(consoles) == 0:
consoles_create_opt = {"SkipDatabaseInit": True, 'AllowCommandPassthru': False}
result = RpcClient.call(Method.ConsoleCreate, [consoles_create_opt], timeout=RPC_FRAMEWORK_API_REQ)
if result is None:
Xcache.set_console_id(None)
return False
else:
active_id = int(result.get("id"))
Xcache.set_console_id(active_id)
return True
else:
active_id = int(consoles[0].get("id"))
Xcache.set_console_id(active_id)
return True
@staticmethod
def reset_active_console():
result = RpcClient.call(Method.ConsoleList, [], timeout=RPC_FRAMEWORK_API_REQ)
if result is None:
Xcache.set_console_id(None)
else:
consoles = result.get("consoles")
if len(consoles) == 0:
pass
else:
for console in consoles: # destroy every known console
cid = int(console.get("id"))
params = [cid]
RpcClient.call(Method.ConsoleDestroy, params, timeout=RPC_FRAMEWORK_API_REQ)
result = RpcClient.call(Method.ConsoleCreate, timeout=RPC_FRAMEWORK_API_REQ)
if result is None:
Xcache.set_console_id(None)
else:
active_id = int(result.get("id"))
Xcache.set_console_id(active_id)
@staticmethod
def write(data=None):
cid = Xcache.get_console_id()
if cid is None:
get_active_console_result = Console.get_active_console()
if get_active_console_result:
cid = Xcache.get_console_id()
else:
return False, None
params = [cid, data.replace("\r\n", "\n")]
result = RpcClient.call(Method.ConsoleWrite, params, timeout=RPC_FRAMEWORK_API_REQ)
if result is None or result.get("result") == "failure":
get_active_console_result = Console.get_active_console()
if get_active_console_result:
cid = Xcache.get_console_id()
params = [cid, data.replace("\r\n", "\n")]
result = RpcClient.call(Method.ConsoleWrite, params, timeout=RPC_FRAMEWORK_API_REQ)
if result is None or result.get("result") == "failure":
return False, None
else:
return True, result
else:
return False, result
else:
return True, result
@staticmethod
def read():
cid = Xcache.get_console_id()
if cid is None:
return False, {}
params = [cid]
result = RpcClient.call(Method.ConsoleRead, params, timeout=RPC_FRAMEWORK_API_REQ)
if result is None:
return False, {}
elif result.get("result") == "failure":
logger.warning(f"Cid: {cid}错误")
return False, {}
else:
return True, result
@staticmethod
def tabs(line=None):
cid = Xcache.get_console_id()
if cid is None:
return False, {}
params = [cid, line]
result = RpcClient.call(Method.ConsoleTabs, params, timeout=RPC_FRAMEWORK_API_REQ)
if result is None or result.get("result") == "failure":
logger.warning(f"Cid: {cid}错误")
return False, {}
else:
return True, result
@staticmethod
def session_detach():
cid = Xcache.get_console_id()
if cid is None:
return False, {}
params = [cid]
result = RpcClient.call(Method.ConsoleSessionDetach, params, timeout=RPC_FRAMEWORK_API_REQ)
if result is None:
return False, {}
elif result.get("result") == "failure":
logger.warning(f"Cid: {cid}错误")
return False, {}
else:
return True, result
@staticmethod
def session_kill():
cid = Xcache.get_console_id()
if cid is None:
return False, {}
params = [cid]
result = RpcClient.call(Method.ConsoleSessionKill, params, timeout=RPC_FRAMEWORK_API_REQ)
if result is None:
return False, {}
elif result.get("result") == "failure":
logger.warning(f"Cid: {cid}错误")
return False, {}
else:
return True, result
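# Editor's addition, not part of the original file: a hedged usage sketch of the
# static API above; "version" is just an illustrative console command:
#   ok, _ = Console.write("version\n")
#   if ok:
#       ok, output = Console.read()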
|
[
"[email protected]"
] | |
c03381a3eb66d32c05604a2226fbaea846f8e98c
|
c7a6f8ed434c86b4cdae9c6144b9dd557e594f78
|
/ECE364/.PyCharm40/system/python_stubs/348993582/dbm.py
|
ac53fcf86f8ad981eb28108dc15d60478a2542de
|
[] |
no_license
|
ArbalestV/Purdue-Coursework
|
75d979bbe72106975812b1d46b7d854e16e8e15e
|
ee7f86145edb41c17aefcd442fa42353a9e1b5d1
|
refs/heads/master
| 2020-08-29T05:27:52.342264 | 2018-04-03T17:59:01 | 2018-04-03T17:59:01 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 667 |
py
|
# encoding: utf-8
# module dbm
# from /usr/lib64/python2.6/lib-dynload/dbm.so
# by generator 1.136
# no doc
# no imports
# Variables with simple values
library = 'GNU gdbm'
# functions
def open(path, flag=None, mode=None): # real signature unknown; restored from __doc__
"""
open(path[, flag[, mode]]) -> mapping
Return a database object.
"""
pass
# classes
class error(Exception):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
|
[
"[email protected]"
] | |
f2d88ede145a55a634404601a3248fdd20b69f0c
|
45de3aa97525713e3a452c18dcabe61ac9cf0877
|
/src/primaires/perso/stat.py
|
376ea04b6dc9b0fedb3545626465b19dc441e1d2
|
[
"BSD-3-Clause"
] |
permissive
|
stormi/tsunami
|
95a6da188eadea3620c70f7028f32806ee2ec0d1
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
refs/heads/master
| 2020-12-26T04:27:13.578652 | 2015-11-17T21:32:38 | 2015-11-17T21:32:38 | 25,606,146 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,954 |
py
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier contient la classe Stat, détaillée plus bas."""
from abstraits.obase import BaseObj
from primaires.perso.exceptions.stat import *
# Flags:
NX = 0 # no exception will ever be raised
I0 = 1 # raise an exception if strictly below 0
IE0 = 2 # raise an exception if below or equal to 0
SM = 4 # raise an exception if strictly above MAX
SEM = 8 # raise an exception if above or equal to MAX
class Stat(BaseObj):
"""Cette classe définit une stat (ou caractéristique).
Les attributs d'une stat sont :
nom -- son nom
symbole -- son symbole (utile pour le prompt)
defaut -- sa valeur par défaut, celle donnée à un joueur à sa création
marge -- la marge maximale
max -- une chaîne de caractère représentant une autre stat
flags -- les flags indiquant quand une exception doit être levée
parent -- le parent hébergeant les stats
"""
_nom = "stat"
_version = 1
def __init__(self, nom, symbole, defaut, marge, max, flags=I0, parent=None):
"""Constructeur d'une stat.
Elle prend les mêmes paramètres que ceux passés dans l'ordre, dans
la configuration.
Voir : ./cfg_stats.py
"""
BaseObj.__init__(self)
self.nom = nom
self.symbole = symbole
self.defaut = defaut
self.marge_min = 0
self.marge_max = marge
self.nom_max = max
self.flags = flags
self.parent = parent
# Values
self.__base = self.defaut
self.__variable = 0
self.__max = None
if self.parent and max:
self.__max = getattr(self.parent, "_{}".format(max))
self._construire()
def __getnewargs__(self):
return ("", "", "", 0, "")
def __repr__(self):
return "<stat {}={}>".format(self.nom, self.courante)
def __str__(self):
return "{}={} (base={}, variable={}, max={})".format(
self.nom, self.courante, self.base, self.variable, self.max)
@property
def base(self):
return self.__base
def _get_variable(self):
return self.__variable
def _set_variable(self, variable):
self.__variable = variable
variable = property(_get_variable, _set_variable)
@property
def max(self):
max = self.__max
if max:
max = max.courante
return max
def _get_courante(self):
return self.__base + self.__variable
def _set_courante(self, courante):
"""C'est dans cette propriété qu'on change la valeur courante
de la stat.
On passe par une méthode 'set' qui fait le travail.
"""
self.set(courante, self.flags)
courante = property(_get_courante, _set_courante)
def set(self, courante, flags):
"""Modifie la stat courante.
C'est dans cette méthode qu'on lève des exceptions en fonction des
valeurs modifiées.
NOTE IMPORTANTE: la valeur est modifiée quelque soit l'exception
levée. L'exception est levée pour réagir à un certain comportement
(par exemple, le joueur n'a plus de vitalité) mais elle n'empêchera
pas la stat d'être modifiée.
En revanche, on test bel et bien que la stat de base ne dépasse ni
le max ni la marge.
"""
base = courante - self.__variable
if self.parent and self.parent.parent and \
not self.parent.parent.est_immortel():
# Raising exceptions
if base < 0 and flags & I0:
self.__base = 0
raise StatI0
if base <= 0 and flags & IE0:
self.__base = 0
raise StatIE0
if self.max and flags & SM and base > self.max:
raise StatSM
if self.max and flags & SEM and base >= self.max:
raise StatSEM
if base > self.marge_max:
base = self.marge_max
if base < self.marge_min:
base = self.marge_min
if self.max and base > self.max:
base = self.max
if self.parent and self.parent.parent and \
self.parent.parent.est_immortel() and self.max:
base = self.max
self.__base = base
def __setattr__(self, nom, val):
BaseObj.__setattr__(self, nom, val)
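# Editor's addition, not part of the original file: the flags above are bit
# masks, so a stat can combine several behaviours; a hedged example with an
# illustrative stat name (BaseObj must be provided by the surrounding framework):
#   vitality = Stat("vitality", "v", 50, 100, "", flags=I0 | SM)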
|
[
"[email protected]"
] | |
eebaf1cc5939bf3397f44b7abae4b3301b3f9927
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-cc/huaweicloudsdkcc/v3/model/update_network_instance_request_body.py
|
ccb6925b1dca4650bfa9c81651ceef569cd52c3e
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 |
NOASSERTION
| 2023-06-22T14:50:48 | 2020-05-08T02:28:43 |
Python
|
UTF-8
|
Python
| false | false | 3,407 |
py
|
# coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class UpdateNetworkInstanceRequestBody:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'network_instance': 'UpdateNetworkInstance'
}
attribute_map = {
'network_instance': 'network_instance'
}
def __init__(self, network_instance=None):
"""UpdateNetworkInstanceRequestBody
The model defined in huaweicloud sdk
:param network_instance:
:type network_instance: :class:`huaweicloudsdkcc.v3.UpdateNetworkInstance`
"""
self._network_instance = None
self.discriminator = None
self.network_instance = network_instance
@property
def network_instance(self):
"""Gets the network_instance of this UpdateNetworkInstanceRequestBody.
:return: The network_instance of this UpdateNetworkInstanceRequestBody.
:rtype: :class:`huaweicloudsdkcc.v3.UpdateNetworkInstance`
"""
return self._network_instance
@network_instance.setter
def network_instance(self, network_instance):
"""Sets the network_instance of this UpdateNetworkInstanceRequestBody.
:param network_instance: The network_instance of this UpdateNetworkInstanceRequestBody.
:type network_instance: :class:`huaweicloudsdkcc.v3.UpdateNetworkInstance`
"""
self._network_instance = network_instance
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, UpdateNetworkInstanceRequestBody):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"[email protected]"
] | |
6ff299c2cc6c8b9893253d3ebe9d3ea491400c72
|
60be3894ad491bde502b8f6909a026ee115d952e
|
/aiosmb/network/tcp.py
|
3da2e5cbc315e7cbcfde7804212c83c4942ef225
|
[] |
no_license
|
topotam/aiosmb
|
7c97c6a9806c84a9fae28fa372cc6903fa6ec0c5
|
e2ece67bbf380f576b154b09ea5fd63d9b4ecf4c
|
refs/heads/master
| 2023-06-25T17:41:03.605226 | 2021-07-27T18:31:12 | 2021-07-27T18:31:12 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,676 |
py
|
import enum
import asyncio
from aiosmb import logger
from aiosmb.commons.exceptions import *
class TCPSocket:
"""
Generic asynchronous TCP socket class, nothing SMB related.
Creates the connection and channels incoming/outgoing bytes via asynchonous queues.
"""
def __init__(self, socket = None, target = None):
self.settings = target
self.socket = socket #for future, if we want a custom soscket
self.reader = None
self.writer = None
self.out_queue = asyncio.Queue()
self.in_queue = asyncio.Queue()
self.disconnected = asyncio.Event()
self.incoming_task = None
self.outgoing_task = None
async def disconnect(self):
"""
Disconnects from the socket.
Stops the reader and writer streams.
"""
if self.disconnected.is_set():
return
if self.outgoing_task is not None:
self.outgoing_task.cancel()
if self.incoming_task is not None:
self.incoming_task.cancel()
if self.writer is not None:
try:
self.writer.close()
except:
pass
self.writer = None
self.reader = None
self.disconnected.set()
async def handle_incoming(self):
"""
Reads data bytes from the socket and dispatches it to the incoming queue
"""
try:
lasterror = None
msgsize = None
while not self.disconnected.is_set():
try:
data = await self.reader.readexactly(4)
msgsize = int.from_bytes(data[1:], byteorder='big', signed = False)
data = await self.reader.readexactly(msgsize)
await self.in_queue.put( (data, None) )
if data == b'':
return
except asyncio.CancelledError as e:
lasterror = e
break
except Exception as e:
logger.debug('[TCPSocket] handle_incoming %s' % str(e))
lasterror = e
break
except asyncio.CancelledError:
return
except Exception as e:
lasterror = e
finally:
if self.in_queue is not None:
await self.in_queue.put( (None, lasterror) )
await self.disconnect()
async def handle_outgoing(self):
"""
Reads data bytes from the outgoing queue and dispatches it to the socket
"""
try:
while not self.disconnected.is_set():
data = await self.out_queue.get()
self.writer.write(data)
await self.writer.drain()
except asyncio.CancelledError:
#the SMB connection is terminating
return
except Exception as e:
logger.exception('[TCPSocket] handle_outgoing %s' % str(e))
await self.disconnect()
#async def connect(self, settings): #not needed parameter because we have networkselector now...
async def connect(self):
"""
Main function to be called, connects to the target specified in settings, and starts reading/writing.
"""
#self.settings = settings
try:
con = asyncio.open_connection(self.settings.get_ip(), self.settings.get_port())
try:
self.reader, self.writer = await asyncio.wait_for(con, int(self.settings.timeout))
except asyncio.TimeoutError:
logger.debug('[TCPSocket] Connection timeout')
raise SMBConnectionTimeoutException('[TCPSocket] Connection timeout')
except ConnectionRefusedError:
logger.debug('[TCPSocket] Connection refused')
raise SMBConnectionRefusedException('[TCPSocket] Connection refused')
except asyncio.CancelledError:
#the SMB connection is terminating
raise asyncio.CancelledError
except Exception as e:
logger.debug('[TCPSocket] connect generic exception')
raise e
self.incoming_task = asyncio.create_task(self.handle_incoming())
self.outgoing_task = asyncio.create_task(self.handle_outgoing())
return True, None
except Exception as e:
try:
self.writer.close()
except:
pass
return False, e
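# Editor's addition, not part of the original file: a hedged connection sketch.
# The real target object comes from aiosmb's own target classes; this stand-in
# only mimics the attributes TCPSocket actually reads (get_ip, get_port, timeout):
#   class _FakeTarget:
#       timeout = 5
#       def get_ip(self): return "127.0.0.1"
#       def get_port(self): return 445
#   async def demo():
#       sock = TCPSocket(target=_FakeTarget())
#       ok, err = await sock.connect()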
|
[
"[email protected]"
] | |
482928edaa2e06cd3b7bed4f4eaec7daf1bdda60
|
2c74bb301f1ed83b79254944183ac5a18a639fdf
|
/homeassistant/components/esphome/domain_data.py
|
01f0a4d6b1369b6f6908d943c821bb3805e59e57
|
[
"Apache-2.0"
] |
permissive
|
Adminiuga/home-assistant
|
5bec93007ddac1a268cc359bf7e48530c5f73b38
|
dcf68d768e4f628d038f1fdd6e40bad713fbc222
|
refs/heads/dev
| 2023-02-22T22:03:31.013931 | 2022-11-09T00:27:20 | 2022-11-09T00:27:20 | 123,929,062 | 5 | 4 |
Apache-2.0
| 2023-02-22T06:14:31 | 2018-03-05T14:11:09 |
Python
|
UTF-8
|
Python
| false | false | 2,885 |
py
|
"""Support for esphome domain data."""
from __future__ import annotations
from dataclasses import dataclass, field
from typing import TypeVar, cast
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.json import JSONEncoder
from homeassistant.helpers.storage import Store
from .entry_data import RuntimeEntryData
STORAGE_VERSION = 1
DOMAIN = "esphome"
_DomainDataSelfT = TypeVar("_DomainDataSelfT", bound="DomainData")
@dataclass
class DomainData:
"""Define a class that stores global esphome data in hass.data[DOMAIN]."""
_entry_datas: dict[str, RuntimeEntryData] = field(default_factory=dict)
_stores: dict[str, Store] = field(default_factory=dict)
_entry_by_unique_id: dict[str, ConfigEntry] = field(default_factory=dict)
def get_by_unique_id(self, unique_id: str) -> ConfigEntry:
"""Get the config entry by its unique ID."""
return self._entry_by_unique_id[unique_id]
def get_entry_data(self, entry: ConfigEntry) -> RuntimeEntryData:
"""Return the runtime entry data associated with this config entry.
Raises KeyError if the entry isn't loaded yet.
"""
return self._entry_datas[entry.entry_id]
def set_entry_data(self, entry: ConfigEntry, entry_data: RuntimeEntryData) -> None:
"""Set the runtime entry data associated with this config entry."""
if entry.entry_id in self._entry_datas:
raise ValueError("Entry data for this entry is already set")
self._entry_datas[entry.entry_id] = entry_data
if entry.unique_id:
self._entry_by_unique_id[entry.unique_id] = entry
def pop_entry_data(self, entry: ConfigEntry) -> RuntimeEntryData:
"""Pop the runtime entry data instance associated with this config entry."""
if entry.unique_id:
del self._entry_by_unique_id[entry.unique_id]
return self._entry_datas.pop(entry.entry_id)
def is_entry_loaded(self, entry: ConfigEntry) -> bool:
"""Check whether the given entry is loaded."""
return entry.entry_id in self._entry_datas
def get_or_create_store(self, hass: HomeAssistant, entry: ConfigEntry) -> Store:
"""Get or create a Store instance for the given config entry."""
return self._stores.setdefault(
entry.entry_id,
Store(
hass, STORAGE_VERSION, f"esphome.{entry.entry_id}", encoder=JSONEncoder
),
)
@classmethod
def get(cls: type[_DomainDataSelfT], hass: HomeAssistant) -> _DomainDataSelfT:
"""Get the global DomainData instance stored in hass.data."""
# Don't use setdefault - this is a hot code path
if DOMAIN in hass.data:
return cast(_DomainDataSelfT, hass.data[DOMAIN])
ret = hass.data[DOMAIN] = cls()
return ret
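# Editor's addition, not part of the original file: the typical call pattern
# implied by the API above (hass and entry are supplied by Home Assistant at runtime):
#   domain_data = DomainData.get(hass)
#   entry_data = domain_data.get_entry_data(entry)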
|
[
"[email protected]"
] | |
5a1ed0cd70c637628613bcdc2591471ce0eebf24
|
b3c8678c1db0b3e256de97e560d7d4d26c1dd6eb
|
/src/jpl.mcl.site.sciencedata/src/jpl/mcl/site/sciencedata/testing.py
|
4325448b2a43549da50926c2e47a4028a8f43d2d
|
[
"GPL-2.0-only",
"Apache-2.0",
"GPL-1.0-or-later"
] |
permissive
|
MCLConsortium/mcl-site
|
e4a127235504e7ac5575ef3d73c8fd1bdf02824b
|
5eb9c16a7fe322192a03461a9f22ecb8c17307fd
|
refs/heads/master
| 2021-06-09T23:49:35.775652 | 2021-04-30T22:59:59 | 2021-04-30T22:59:59 | 49,965,919 | 1 | 0 |
Apache-2.0
| 2020-08-25T15:58:27 | 2016-01-19T16:24:58 |
Python
|
UTF-8
|
Python
| false | false | 1,896 |
py
|
# encoding: utf-8
from plone.app.testing import PloneSandboxLayer, IntegrationTesting, FunctionalTesting, PLONE_FIXTURE
from . import PACKAGE_NAME
import pkg_resources, urllib2, urllib, httplib, plone.api
class TestSchemeHandler(urllib2.BaseHandler):
u'''A special URL handler for the testing-only scheme ``testscheme``.'''
def testscheme_open(self, req):
try:
selector = req.get_selector()
path = 'tests/data/' + selector.split('/')[-1] + '.json'
if pkg_resources.resource_exists(PACKAGE_NAME, path):
return urllib.addinfourl(
pkg_resources.resource_stream(PACKAGE_NAME, path),
httplib.HTTPMessage(open('/dev/null')),
req.get_full_url(),
200
)
else:
raise urllib2.URLError('Not found')
except Exception:
raise urllib2.URLError('Not found')
class JPLMCLSiteSciencedataLayer(PloneSandboxLayer):
defaultBases = (PLONE_FIXTURE,)
def setUpZope(self, app, configurationContext):
import jpl.mcl.site.sciencedata
self.loadZCML(package=jpl.mcl.site.sciencedata)
urllib2.install_opener(urllib2.build_opener(TestSchemeHandler))
def setUpPloneSite(self, portal):
wfTool = plone.api.portal.get_tool('portal_workflow')
wfTool.setDefaultChain('plone_workflow')
self.applyProfile(portal, 'jpl.mcl.site.sciencedata:default')
JPL_MCL_SITE_SCIENCEDATA_FIXTURE = JPLMCLSiteSciencedataLayer()
JPL_MCL_SITE_SCIENCEDATA_INTEGRATION_TESTING = IntegrationTesting(
bases=(JPL_MCL_SITE_SCIENCEDATA_FIXTURE,),
name='JPLMCLSiteSciencedataLayer:IntegrationTesting'
)
JPL_MCL_SITE_SCIENCEDATA_FUNCTIONAL_TESTING = FunctionalTesting(
bases=(JPL_MCL_SITE_SCIENCEDATA_FIXTURE,),
name='JPLMCLSiteSciencedataLayer:FunctionalTesting'
)
|
[
"[email protected]"
] | |
d555da4df0ff92fad94428138c04e5725366861c
|
47542e6b98c19592f44ce44297771c698d4987f7
|
/ch09/09_08.py
|
42a496a958555522889a656ddd4e96b4f567131b
|
[
"Apache-2.0"
] |
permissive
|
sharebook-kr/book-cryptocurrency
|
235b6998668265ec804451afddd245a52824f51a
|
847ba97ba096c257b35f5e507cd33fa6a0724860
|
refs/heads/master
| 2022-12-14T05:24:52.765589 | 2022-11-30T01:35:08 | 2022-11-30T01:35:08 | 128,632,349 | 162 | 141 |
Apache-2.0
| 2022-11-30T01:35:09 | 2018-04-08T11:05:17 |
Python
|
UTF-8
|
Python
| false | false | 301 |
py
|
import websockets
import asyncio
async def bithumb_ws_client():
uri = "wss://pubwss.bithumb.com/pub/ws"
async with websockets.connect(uri) as websocket:
greeting = await websocket.recv()
print(greeting)
async def main():
await bithumb_ws_client()
asyncio.run(main())
|
[
"[email protected]"
] | |
9a4b55c49ddbd6bf15ee9f95c0e49e1c0aa461d2
|
0c7e54178e89a4dad98deb8265c2cb41ca3399b9
|
/backend/strawberry_forms/tests/test_mutations.py
|
470971ab95ea4f9bf7f8c47ae52ac75af4feafda
|
[
"MIT"
] |
permissive
|
marlenebDC/pycon
|
4394bf7a0aecc5aa8ae0b378900d86c2afc7fab7
|
175f2ee9e8698bbcf15dd33d2eb4739fee04c6d7
|
refs/heads/master
| 2023-07-07T00:34:39.932779 | 2020-01-21T01:00:24 | 2020-01-21T01:00:24 | 235,290,754 | 0 | 0 |
MIT
| 2023-06-23T23:35:11 | 2020-01-21T08:30:15 | null |
UTF-8
|
Python
| false | false | 1,670 |
py
|
import strawberry
from django.forms import Form, IntegerField
from strawberry_forms.mutations import FormMutation
def test_form_mutation_without_context():
class TestForm(Form):
a = IntegerField()
def save(self, *args, **kwargs):
return "hello"
class TestMutation(FormMutation):
class Meta:
form_class = TestForm
@strawberry.input
class TestInput:
a: int
assert TestMutation.Mutation(None, TestInput(a=1)) == "hello"
def test_form_mutation_response_can_be_converted_using_transform_method():
class TestForm(Form):
a = IntegerField()
def save(self, *args, **kwargs):
return "hello"
class TestMutation(FormMutation):
@classmethod
def transform(cls, result):
return "world"
class Meta:
form_class = TestForm
@strawberry.input
class TestInput:
a: int
assert TestMutation.Mutation(None, TestInput(a=1)) == "world"
def test_form_mutation_transform_is_not_required():
class TestForm(Form):
a = IntegerField()
def save(self, *args, **kwargs):
return "hello"
class TestMutation(FormMutation):
class Meta:
form_class = TestForm
@strawberry.input
class TestInput:
a: int
assert TestMutation.Mutation(None, TestInput(a=1)) == "hello"
def test_mutation_without_input():
class TestForm(Form):
def save(self, *args, **kwargs):
return "ciao"
class TestMutation(FormMutation):
class Meta:
form_class = TestForm
assert TestMutation.Mutation(None) == "ciao"
|
[
"[email protected]"
] | |
225c14407e2eba431953f219ed8ecc4582a965c5
|
8b54570140861ffbe464e244f9f49ba55e341577
|
/linux/ovirt-guest-tray.py
|
cdc740419735bb0eb6c99bfa8f3a70c09adf3c55
|
[
"Apache-2.0"
] |
permissive
|
vinzenz/ovirt-guest-agent-tray
|
36569d149b7082e8129fbe5c462869bfeb8bf779
|
581a73f3ff4431a6a17f6ff9bc3d64f2b23ff586
|
refs/heads/master
| 2016-09-06T07:41:07.988384 | 2014-07-30T13:55:46 | 2014-07-30T13:57:24 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,422 |
py
|
#!/usr/bin/env python
import gtk
class TrayIcon:
def __init__(self, *args, **kwargs):
self.icon = gtk.StatusIcon()
self.icon.set_from_file('ovirt-icon-48.svg')
self.icon.connect('popup-menu', self.on_popup_menu)
def on_about(self, *args, **kwargs):
dlg = gtk.Dialog("About the oVirt Guest Agent",
None,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
(gtk.STOCK_OK, gtk.RESPONSE_ACCEPT))
label1 = gtk.Label("oVirt Guest Agent for Linux")
label1.show()
label2 = gtk.Label("Version 3.6.0")
label2.show()
label3 = gtk.Label("oVirt Guest Agent is running.")
label3.show()
sw = gtk.ScrolledWindow()
sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
sw.set_shadow_type(gtk.SHADOW_ETCHED_IN)
textview = gtk.TextView()
textview.set_editable(False)
textview.set_cursor_visible(False)
textview.set_sensitive(False)
sw.add(textview)
buffer = textview.get_buffer()
lic = '<Copyright information here>'
try:
f = open('/usr/share/ovirt-guest-agent/COPYING', 'r')
lic = f.read()
f.close()
except (OSError,IOError):
pass
buffer.insert(buffer.get_end_iter(), lic)
textview.show()
sw.show()
dlg.vbox.set_homogeneous(False)
dlg.vbox.pack_start(label1, fill=False, expand=False, padding=4)
dlg.vbox.pack_start(label2, fill=False, expand=False, padding=4)
dlg.vbox.pack_start(sw, fill=True, expand=True, padding=4)
dlg.vbox.pack_start(label3, fill=False, expand=False, padding=4)
dlg.set_default_size(640, 480)
dlg.run()
dlg.destroy()
def on_popup_menu(self, icon, event_button, event_time):
menu = gtk.Menu()
about = gtk.MenuItem('About')
about.show()
about.connect('activate', self.on_about)
menu.append(about)
sep = gtk.SeparatorMenuItem()
sep.show()
menu.append(sep)
quit = gtk.MenuItem('Quit')
quit.show()
menu.append(quit)
quit.connect('activate', gtk.main_quit)
menu.popup(None, None, gtk.status_icon_position_menu, event_button, event_time, self.icon)
if __name__ == '__main__':
icon = TrayIcon()
gtk.main()
|
[
"[email protected]"
] | |
d5cab9a4b1e7e1be3cf13dddebae13f6a4066b74
|
1d9a6406c859fda186f520bb4472c551fc572c7b
|
/src/hopla/hoplalib/zoo/petcontroller.py
|
b5f9e7d0243348d6b233bab73bbf200a2f76e11e
|
[
"Apache-2.0"
] |
permissive
|
rickie/hopla
|
af21b794ce6719d402721550e1ee4091790410b6
|
24a422194e42c03d5877dc167b2b07147326a595
|
refs/heads/main
| 2023-08-13T17:33:03.612293 | 2021-10-12T12:13:25 | 2021-10-12T12:13:25 | 408,538,704 | 0 | 0 |
Apache-2.0
| 2021-09-20T17:30:15 | 2021-09-20T17:30:15 | null |
UTF-8
|
Python
| false | false | 2,211 |
py
|
"""
A module for performing pet-feeding HTTP requests.
"""
from typing import NoReturn, Optional, Union
import requests
from hopla.hoplalib.http import HabiticaRequest, UrlBuilder
from hopla.hoplalib.zoo.feed_clickhelper import get_feed_data_or_exit
from hopla.hoplalib.zoo.zoofeed_algorithms import FeedPlanItem
class FeedPostRequester(HabiticaRequest):
"""
The FeedPostRequester sends a post request to feed a pet.
Note: this API endpoint expects query params instead
    of a request body (even though it is an HTTP POST).
[APIDOCS](https://habitica.com/apidoc/#api-User-UserFeed)
"""
_DEFAULT_FOOD_AMOUNT = 1
def __init__(self, *,
pet_name: str,
food_name: str,
food_amount: Optional[int] = _DEFAULT_FOOD_AMOUNT):
self.pet_name = pet_name
self.food_name = food_name
self.query_params = {
"amount": food_amount or FeedPostRequester._DEFAULT_FOOD_AMOUNT
}
@property
def path(self) -> str:
"""Return the URL used to feed a pet"""
return f"/user/feed/{self.pet_name}/{self.food_name}"
@property
def feed_pet_food_url(self) -> str:
"""Return the url to feed a pet"""
return UrlBuilder(path_extension=self.path).url
def post_feed_request(self) -> requests.Response:
"""Performs the feed pet post requests and return the response"""
return requests.post(url=self.feed_pet_food_url, headers=self.default_headers,
params=self.query_params)
def post_feed_request_get_data_or_exit(self) -> Union[NoReturn, dict]:
"""
        Perform the feed-pet POST request and return
        the feed response data if successful; otherwise exit.
:return:
"""
response: requests.Response = self.post_feed_request()
return get_feed_data_or_exit(response)
@classmethod
def build_from(cls, feed_item: FeedPlanItem) -> "FeedPostRequester":
"""Create a request from a feed plan item."""
return FeedPostRequester(
pet_name=feed_item.pet_name,
food_name=feed_item.food_name,
food_amount=feed_item.times
)
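# A minimal usage sketch: the pet/food names below are illustrative, and
# actually sending the request assumes valid Habitica credentials are
# configured for HabiticaRequest.
if __name__ == "__main__":
    requester = FeedPostRequester(pet_name="Wolf-Base", food_name="Meat",
                                  food_amount=2)
    print(requester.path)  # -> /user/feed/Wolf-Base/Meat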
|
[
"[email protected]"
] | |
e1461c6411425ee974d36267c209f92b7be55c59
|
f6188c0c27da8d2507e832908ba9de524f0b907d
|
/client_speed_test.py
|
b23314ac7daa63ea09593b18003d1800d1583546
|
[] |
no_license
|
lforet/repfibdigit
|
df1df2fe2ba40ede9662120ea94f7d529f5d4abc
|
945ce755fd2526a1a3e242b909b93a79ac4e65fb
|
refs/heads/master
| 2021-01-10T08:43:11.123781 | 2018-12-21T05:32:45 | 2018-12-21T05:32:45 | 8,490,162 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,997 |
py
|
#!/usr/bin/python # This is client.py file
#System modules
import os
import time
import itertools
import cPickle as pickle
import numpy as np
#import cProfile
import timeit
import uuid
import sys
import fib
########################################################################
pgbreak = "-----------------------------------------------"
#----------------------------------------------------------------------
def is_repfibdigit( number_to_test):
n = map(int,str(number_to_test))
while number_to_test > n[0]:
n=n[1:]+[sum(n)]
if (number_to_test == n[0]) & (number_to_test>9):
show_proof(number_to_test)
#raw_input()
#time.sleep(1)
#else:
# print number_to_test, " is NOT a Keith Number"
return
def is_repfibdigit2( number_to_test):
if fib.is_repfibdigit(number_to_test) == True:
show_proof(number_to_test)
#raw_input()
#time.sleep(1)
#else:
# print number_to_test, " is NOT a Keith Number"
return
# This function works around the 32-bit native int barrier;
# it is not needed on native 64-bit systems.
def my_xrange( start, stop, step):
i = start
while i < stop:
yield i
i += step
def show_proof(kn):
print '---------------------------------------------'
#print 'queue:', threading.current_thread()
print kn, " is a Keith Number!"
print "PROOF:"
n=map(int,str(kn))
while kn > sum(n):
print n ," = ", sum(n)
n=n[1:]+[sum(n)]
print n ," = ", sum(n)
#self.report_keith_num(number_to_test)
#print "new keith number reported!!!!"
print '---------------------------------------------'
print "press ENTER to continue"
########################################################################
if __name__=="__main__":
if len(sys.argv) > 1:
end_num = sys.argv[1]
nowtime = time.clock()
# get num to work from
start_num = 0
print "Starting number:", start_num
for x in xrange(start_num, int(end_num)):
is_repfibdigit2(x)
print
print "completion time:", abs(nowtime - time.clock())
print pgbreak
#raw_input()
|
[
"[email protected]"
] | |
00db2c8c3ed972b7163d98736f55e12ede747a2c
|
50008b3b7fb7e14f793e92f5b27bf302112a3cb4
|
/recipes/Python/577538_Poor_Man_unit_tests/recipe-577538.py
|
a085b12780b5428ff89dbdb43e8d9d754e602175
|
[
"MIT"
] |
permissive
|
betty29/code-1
|
db56807e19ac9cfe711b41d475a322c168cfdca6
|
d097ca0ad6a6aee2180d32dce6a3322621f655fd
|
refs/heads/master
| 2023-03-14T08:15:47.492844 | 2021-02-24T15:39:59 | 2021-02-24T15:39:59 | 341,878,663 | 0 | 0 |
MIT
| 2021-02-24T15:40:00 | 2021-02-24T11:31:15 |
Python
|
UTF-8
|
Python
| false | false | 3,834 |
py
|
#! /usr/bin/env python
######################################################################
# Written by Kevin L. Sitze on 2010-12-03
# This code may be used pursuant to the MIT License.
######################################################################
import sys
import traceback
from types import FloatType, ComplexType
__all__ = (
'assertEquals',
'assertNotEquals',
'assertException',
'assertFalse',
'assertNone',
'assertNotNone',
'assertSame',
'assertNotSame',
'assertTrue'
)
def colon( msg ):
if msg:
return ": " + str( msg )
else:
return ""
def assertEquals( exp, got, msg = None ):
"""assertEquals( exp, got[, message] )
Two objects test as "equal" if:
* they are the same object as tested by the 'is' operator.
* either object is a float or complex number and the absolute
value of the difference between the two is less than 1e-8.
* applying the equals operator ('==') returns True.
"""
if exp is got:
r = True
elif ( type( exp ) in ( FloatType, ComplexType ) or
type( got ) in ( FloatType, ComplexType ) ):
r = abs( exp - got ) < 1e-8
else:
r = ( exp == got )
if not r:
print >>sys.stderr, "Error: expected <%s> but got <%s>%s" % ( repr( exp ), repr( got ), colon( msg ) )
traceback.print_stack()
def assertNotEquals( exp, got, msg = None ):
"""assertNotEquals( exp, got[, message] )
Two objects test as "equal" if:
* they are the same object as tested by the 'is' operator.
* either object is a float or complex number and the absolute
value of the difference between the two is less than 1e-8.
* applying the equals operator ('==') returns True.
"""
if exp is got:
r = False
elif ( type( exp ) in ( FloatType, ComplexType ) or
type( got ) in ( FloatType, ComplexType ) ):
r = abs( exp - got ) >= 1e-8
else:
r = ( exp != got )
if not r:
print >>sys.stderr, "Error: expected different values but both are equal to <%s>%s" % ( repr( exp ), colon( msg ) )
traceback.print_stack()
def assertException( exceptionType, f, msg = None ):
"""Assert that an exception of type \var{exceptionType}
is thrown when the function \var{f} is evaluated.
"""
try:
f()
except exceptionType:
assert True
else:
print >>sys.stderr, "Error: expected <%s> to be thrown by function%s" % ( exceptionType.__name__, colon( msg ) )
traceback.print_stack()
def assertFalse( b, msg = None ):
"""assertFalse( b[, message] )
"""
if b:
print >>sys.stderr, "Error: expected value to be False%s" % colon( msg )
traceback.print_stack()
def assertNone( x, msg = None ):
assertSame( None, x, msg )
def assertNotNone( x, msg = None ):
assertNotSame( None, x, msg )
def assertSame( exp, got, msg = None ):
if got is not exp:
print >>sys.stderr, "Error: expected <%s> to be the same object as <%s>%s" % ( repr( exp ), repr( got ), colon( msg ) )
traceback.print_stack()
def assertNotSame( exp, got, msg = None ):
if got is exp:
print >>sys.stderr, "Error: expected two distinct objects but both are the same object <%s>%s" % ( repr( exp ), colon( msg ) )
traceback.print_stack()
def assertTrue( b, msg = None ):
if not b:
print >>sys.stderr, "Error: expected value to be True%s" % colon( msg )
traceback.print_stack()
if __name__ == "__main__":
assertNone( None )
assertEquals( 5, 5 )
assertException( KeyError, lambda: {}['test'] )
assertNone( 5, 'this assertion is expected' )
assertEquals( 5, 6, 'this assertion is expected' )
assertException( KeyError, lambda: {}, 'this assertion is expected' )
|
[
"[email protected]"
] | |
294780ff7ab60dc91677fc1d89295b77c146b850
|
4a53aba78d55247e185d8cef5e2a1f8892ae68be
|
/learn_python/08.jpype.py
|
13917ac8e31a1518b4553ebf02a516bd1b6ee5af
|
[] |
no_license
|
axu4github/Learn
|
665bb8ddd2eb420a0e7bc3d1ff68f66958936645
|
2eb33b5a97f1730e3f774b80e3b206c49faa2228
|
refs/heads/master
| 2023-01-22T15:49:53.260777 | 2018-10-25T15:21:56 | 2018-10-25T15:21:56 | 61,703,577 | 1 | 0 | null | 2023-01-12T08:23:28 | 2016-06-22T08:46:46 |
JavaScript
|
UTF-8
|
Python
| false | false | 664 |
py
|
# encoding=utf-8
'''
Installing JPype on macOS
1. Download the latest version from https://sourceforge.net/projects/jpype/
2. Unpack the archive and enter the directory
3. Run: sudo python setup.py install
If you hit `error: command 'clang' failed with exit status 1`,
add `"/System/Library/Frameworks/JavaVM.framework/Versions/A/Headers/"` to `self.includeDirs` in setup.py so that header files such as `jni.h` can be found.
For details see: http://blog.csdn.net/jerrychenly/article/details/20545995
'''
from jpype import *
startJVM('/Library/Java/JavaVirtualMachines/jdk1.7.0_09.jdk/Contents/MacOS/libjli.dylib')
java.lang.System.out.println("hello world")
shutdownJVM()
|
[
"[email protected]"
] | |
746b0c743ffb8d49b2ff71cf870102d7d7279481
|
5cea76d53779d466f19a5cf0b51e003586cc4a7b
|
/py4ops/getip.py
|
e2bbe141b3b44d2c61828b6bf996715b6e854f17
|
[] |
no_license
|
evan886/python
|
40152fdb4885876189580141abe27a983d04e04d
|
d33e996e93275f6b347ecc2d30f8efe05accd10c
|
refs/heads/master
| 2021-06-28T12:35:10.793186 | 2021-05-26T14:33:40 | 2021-05-26T14:33:40 | 85,560,342 | 2 | 1 | null | 2017-10-11T05:31:06 | 2017-03-20T09:51:50 |
JavaScript
|
UTF-8
|
Python
| false | false | 599 |
py
|
#!/usr/bin/python
#-*- coding:utf-8 -*-
import socket
import fcntl
import struct
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
print get_ip_address('eth0')
'''
Sending email needs this IP address.
>>> get_ip_address('eth0')
'38.113.228.130'
http://blog.csdn.net/heizistudio/article/details/38413739
Getting the local machine's IP address with Python:
http://www.pythonclub.org/python-network-application/get-ip-address
'''
|
[
"[email protected]"
] | |
be3cab903221403283dcb433087d1d2770b819c1
|
2aec9c5e8c72b731d3abf22f2a407fe09c1cde09
|
/SipopPolicyImg/SipopPolicyImg/settings.py
|
dc0ee7131688caeaf0e766dc8fe3b44d9a87787e
|
[] |
no_license
|
jiangyg/ZWFproject
|
8b24cc34970ae0a9c2a2b0039dc527c83a5862b5
|
aa35bc59566d92721f23d2dd00b0febd268ac2dd
|
refs/heads/master
| 2020-09-26T17:01:00.229380 | 2019-11-15T13:16:21 | 2019-11-15T13:16:21 | 226,297,631 | 0 | 1 | null | 2019-12-06T09:55:37 | 2019-12-06T09:55:36 | null |
UTF-8
|
Python
| false | false | 2,169 |
py
|
# Scrapy settings for SipopPolicyImg project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/topics/settings.html
#
SPIDER_MODULES = ['SipopPolicyImg.spiders']
NEWSPIDER_MODULE = 'SipopPolicyImg.spiders'
ROBOTSTXT_OBEY = False
# USER_AGENT = 'scrapy-redis (+https://github.com/rolando/scrapy-redis)'
DUPEFILTER_CLASS = "scrapy_redis.dupefilter.RFPDupeFilter"
SCHEDULER = "scrapy_redis.scheduler.Scheduler"
SCHEDULER_PERSIST = True
SCHEDULER_QUEUE_CLASS = "scrapy_redis.queue.SpiderPriorityQueue"
#SCHEDULER_QUEUE_CLASS = "scrapy_redis.queue.SpiderQueue"
#SCHEDULER_QUEUE_CLASS = "scrapy_redis.queue.SpiderStack"
ITEM_PIPELINES = {
'SipopPolicyImg.pipelines.SipoppolicyPipeline': 300,
'scrapy_redis.pipelines.RedisPipeline': 400,
}
LOG_LEVEL = 'DEBUG'
DOWNLOADER_MIDDLEWARES = {
'SipopPolicyImg.middlewares.RandMiddleware': 543,
'scrapy.contrib.downloadermiddleware.useragent.UserAgentMiddleware': None,
}
REDIS_HOST = '127.0.0.1'  # can also be changed to localhost, depending on your setup
REDIS_PORT = 6379
REDIS_PARAMS = {
'db': 15,
}
# Introduce an artificial delay to make use of parallelism and to speed up the
# crawl.
DOWNLOAD_DELAY = 1
CONCURRENT_REQUESTS = 8
RETRY_ENABLED = True  # Retry middleware; defaults to True (enabled)
RETRY_HTTP_CODES = [302]  # HTTP status codes to retry; other errors are discarded
RETRY_TIMES = 10  # Number of retries
AUTOTHROTTLE_ENABLED = True  # AutoThrottle extension
AUTOTHROTTLE_START_DELAY = 5.0
# Initial download delay (in seconds)
AUTOTHROTTLE_MAX_DELAY = 60.0
# Maximum download delay to apply under high latency (in seconds)
AUTOTHROTTLE_DEBUG = True
# Enable AutoThrottle debug mode, which prints stats for every response received so the parameters can be tuned in real time
AUTOTHROTTLE_TARGET_CONCURRENCY = 10
# Number of requests Scrapy should send in parallel to the remote site; raise it for more throughput and load on the remote server, lower it to make the crawler more conservative and polite
HTTPERROR_ALLOWED_CODES = [302, 500, 502, 404, 403, 503]
|
[
"[email protected]"
] | |
09b541231ca7b26b86b963b1d56d20ded60d96a8
|
971e0efcc68b8f7cfb1040c38008426f7bcf9d2e
|
/tests/artificial/transf_Anscombe/trend_ConstantTrend/cycle_12/ar_/test_artificial_128_Anscombe_ConstantTrend_12__100.py
|
90590116d599656197e31e656d41ca810bd1f95f
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
antoinecarme/pyaf
|
a105d172c2e7544f8d580d75f28b751351dd83b6
|
b12db77cb3fa9292e774b2b33db8ce732647c35e
|
refs/heads/master
| 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 |
BSD-3-Clause
| 2023-03-08T21:45:40 | 2016-10-13T09:30:30 |
Python
|
UTF-8
|
Python
| false | false | 269 |
py
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 12, transform = "Anscombe", sigma = 0.0, exog_count = 100, ar_order = 0);
|
[
"[email protected]"
] | |
62476df0b7f7d2e336afdd3147f644b538346cf3
|
6e9c127bd6705a8b92f240ca663163504b86cd81
|
/elecsim/reinforcement_learning/__init__.py
|
c81092b6f862e68895065cc1c34baacd81f097c9
|
[
"MIT"
] |
permissive
|
alexanderkell/elecsim
|
239ffd539d1b04f24186ddaae20ac4ce6b258c03
|
df9ea14cbc8dd3fd4302be9274cb6ea61c0cdb10
|
refs/heads/master
| 2023-04-06T10:03:35.367411 | 2023-04-05T16:52:16 | 2023-04-05T16:52:16 | 124,561,430 | 36 | 10 |
MIT
| 2022-12-08T01:57:45 | 2018-03-09T15:55:53 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 229 |
py
|
"""
File name: __init__.py
Date created: 01/03/2019
Feature: #Enter feature description here
"""
__author__ = "Alexander Kell"
__copyright__ = "Copyright 2018, Alexander Kell"
__license__ = "MIT"
__email__ = "[email protected]"
|
[
"[email protected]"
] | |
2b8a2244e4e8b8f8a97d5cbe7d0d579cd8508d15
|
bc1525a4e85e49829ccbf7cfc9db6881790fa3a7
|
/pyUbiForge/ACU/type_readers/788BAA0D.py
|
cf65a21e511965a7960ee2750c94202ea6eb269d
|
[] |
no_license
|
christianbethel1993/ACExplorer
|
5183228c1c1beb5d7a3c768f5c8345e00e38f82f
|
f343de8925e0ca08aff7d2719c5e885dc3e503ac
|
refs/heads/master
| 2023-03-26T13:57:15.390140 | 2019-07-02T12:05:23 | 2019-07-02T12:05:23 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 373 |
py
|
from pyUbiForge.misc.file_object import FileObjectDataWrapper
from pyUbiForge.misc.file_readers import BaseReader
class Reader(BaseReader):
file_type = '788BAA0D'
def __init__(self, file_object_data_wrapper: FileObjectDataWrapper):
for _ in range(4):
for _ in range(4):
file_object_data_wrapper.read_float_32()
file_object_data_wrapper.out_file_write('\n')
|
[
"[email protected]"
] | |
1ccbc85a8f20324d99f2b2eb30db1d21ed445f07
|
1f51c4e89a71ea3fcc2cc921613aacc19e078b69
|
/16_Cleaning Data in Python [Part - 1]/02_Text and categorical data problems/06_Removing titles and taking names.py
|
e3da04a5987946e045f113e8723ed5864fb6283b
|
[
"MIT"
] |
permissive
|
CodeHemP/CAREER-TRACK-Data-Scientist-with-Python
|
871bafbd21c4e754beba31505965572dd8457adc
|
13ebb10cf9083343056d5b782957241de1d595f9
|
refs/heads/main
| 2023-03-26T08:43:37.054410 | 2021-03-22T15:08:12 | 2021-03-22T15:08:12 | 471,015,287 | 1 | 0 |
MIT
| 2022-03-17T13:52:32 | 2022-03-17T13:52:31 | null |
UTF-8
|
Python
| false | false | 1,407 |
py
|
'''
06 - Removing titles and taking names
While collecting survey respondent metadata in the airlines DataFrame,
the full name of respondents was saved in the full_name column. However,
upon closer inspection, you found that a lot of the different names are
prefixed by honorifics such as "Dr.", "Mr.", "Ms." and "Miss".
Your ultimate objective is to create two new columns named first_name and
last_name, containing the first and last names of respondents respectively.
Before doing so however, you need to remove honorifics.
The airlines DataFrame is in your environment, alongside pandas as pd.
Instructions:
- Remove "Dr.", "Mr.", "Miss" and "Ms." from full_name by replacing them with
an empty string "" in that order.
- Run the assert statement using .str.contains() that tests whether full_name
still contains any of the honorifics.
'''
# Replace "Dr." with empty string ""
airlines['full_name'] = airlines['full_name'].str.replace("Dr.", "")
# Replace "Mr." with empty string ""
airlines['full_name'] = airlines['full_name'].str.replace("Mr.", "")
# Replace "Miss" with empty string ""
airlines['full_name'] = airlines['full_name'].str.replace("Miss", "")
# Replace "Ms." with empty string ""
airlines['full_name'] = airlines['full_name'].str.replace("Ms.", "")
# Assert that full_name has no honorifics
assert airlines['full_name'].str.contains('Ms.|Mr.|Miss|Dr.').any() == False
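# Side note (an aside, assuming pandas >= 0.23): str.replace() treats the
# pattern as a regular expression by default in these pandas versions, so the
# "." in "Dr." also matches any character. To strip only the literal
# honorific, the regex flag can be disabled:
airlines['full_name'] = airlines['full_name'].str.replace("Dr.", "", regex=False)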
|
[
"[email protected]"
] | |
3cd902ce5209b6c7863f07b1602b49859de1031e
|
4d2475135f5fc9cea73572b16f59bfdc7232e407
|
/prob224_basic_calculator.py
|
e775df1d9d3e1655e6652d7439cd899e9757ac9c
|
[] |
no_license
|
Hu-Wenchao/leetcode
|
5fa0ae474aadaba372756d234bc5ec397c8dba50
|
31b2b4dc1e5c3b1c53b333fe30b98ed04b0bdacc
|
refs/heads/master
| 2021-06-24T04:57:45.340001 | 2017-06-17T02:33:09 | 2017-06-17T02:33:09 | 45,328,724 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 954 |
py
|
"""
Implement a basic calculator to evaluate a simple expression string.
The expression string may contain open ( and closing parentheses ),
the plus + or minus sign -, non-negative integers and empty spaces .
You may assume that the given expression is always valid.
Some examples:
"1 + 1" = 2
" 2-1 + 2 " = 3
"(1+(4+5+2)-3)+(6+8)" = 23
"""
class Solution(object):
def calculate(self, s):
"""
:type s: str
:rtype: int
"""
total = 0
i, signs = 0, [1, 1]
while i < len(s):
c = s[i]
if c.isdigit():
start = i
while i < len(s) and s[i].isdigit():
i += 1
total += signs.pop() * int(s[start:i])
continue
if c in '+-(':
signs.append(signs[-1] * (1, -1)[c == '-'])
elif c == ')':
signs.pop()
i += 1
return total
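# A minimal check against the examples quoted in the docstring above.
if __name__ == "__main__":
    solver = Solution()
    assert solver.calculate("1 + 1") == 2
    assert solver.calculate(" 2-1 + 2 ") == 3
    assert solver.calculate("(1+(4+5+2)-3)+(6+8)") == 23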
|
[
"[email protected]"
] | |
78cc2c0e6ce7233a114f720346695cd17917852a
|
f6c051b15e29fbf1501499d5551c0d9237da0852
|
/order/migrations/0008_auto_20210108_0304.py
|
8fa64fe7e7a2df8ee6ded7dac0bf581c23033732
|
[] |
no_license
|
Deepjyoti13/eCommerce
|
8e672d2c4b6f708ef4ac1b66521ce72d2fe2cc39
|
b0745b8c3a410f7ee8182496c556229748fd3265
|
refs/heads/master
| 2023-02-25T04:00:47.068320 | 2021-01-24T19:21:13 | 2021-01-24T19:21:13 | 321,396,947 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 560 |
py
|
# Generated by Django 3.1.4 on 2021-01-07 21:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('order', '0007_order_tracking'),
]
operations = [
migrations.AlterField(
model_name='order',
name='status',
field=models.CharField(choices=[('New', 'New'), ('Accepted', 'Accepted'), ('Preparing', 'Preparing'), ('On Shipping', 'On Shipping'), ('Completed', 'Completed'), ('Canceled', 'Canceled')], default='New', max_length=50),
),
]
|
[
"[email protected]"
] | |
f1c57d57d4175a117e64f2370c9b2da75032aefd
|
5cea76d53779d466f19a5cf0b51e003586cc4a7b
|
/project/chapter29squish/config.py
|
c0a9c78f74009e17e69d5662bf33698d71ab2bc1
|
[] |
no_license
|
evan886/python
|
40152fdb4885876189580141abe27a983d04e04d
|
d33e996e93275f6b347ecc2d30f8efe05accd10c
|
refs/heads/master
| 2021-06-28T12:35:10.793186 | 2021-05-26T14:33:40 | 2021-05-26T14:33:40 | 85,560,342 | 2 | 1 | null | 2017-10-11T05:31:06 | 2017-03-20T09:51:50 |
JavaScript
|
UTF-8
|
Python
| false | false | 713 |
py
|
# Configuration file for Squish
# -----------------------------
# Feel free to modify the configuration variables below to taste.
# If the game is too fast or too slow, try to modify the speed
# variables.
# Change these to use other images in the game:
banana_image = 'banana.png'
weight_image = 'weight.png'
splash_image = 'weight.png'
#splash_image = 'banana.png'
# Change these to affect the general appearance:
screen_size = 800, 600
background_color = 255, 255, 255
margin = 30
full_screen = 0
#full_screen = 1
#font_size = 68
font_size = 48
# These affect the behavior of the game:
drop_speed = 1
banana_speed = 1
speed_increase = 1
weights_per_level = 10
banana_pad_top = 40
banana_pad_side = 20
|
[
"[email protected]"
] | |
9fcb80ebf6ba49d19469342df5512714fae0445e
|
c7cbbd4b1c1e281cef5f4a0c4e3d4a97cee2241e
|
/froide/accesstoken/views.py
|
f7bfb75707d18ad131e30d2a36f10fd8cc21fc26
|
[
"MIT"
] |
permissive
|
manonthemat/froide
|
078cf78a6eb35226512c0bdfa2ac9043bcc81ad9
|
698c49935eaf2e922f3c9f6a46af0fd545ccbbbb
|
refs/heads/master
| 2020-08-14T08:19:36.215473 | 2019-10-14T19:43:16 | 2019-10-14T19:43:16 | 215,129,869 | 0 | 0 |
MIT
| 2019-10-14T19:35:49 | 2019-10-14T19:35:49 | null |
UTF-8
|
Python
| false | false | 669 |
py
|
from django.views.decorators.http import require_POST
from django.utils.translation import ugettext_lazy as _
from django.contrib import messages
from froide.helper.utils import render_403, get_redirect
from .forms import ResetTokenForm
@require_POST
def reset_token(request):
if not request.user.is_authenticated:
return render_403(request)
form = ResetTokenForm(data=request.POST, user=request.user)
if form.is_valid():
message = form.save()
messages.add_message(request, messages.SUCCESS, message)
else:
messages.add_message(request, messages.ERROR, _('Failed to reset token.'))
return get_redirect(request)
|
[
"[email protected]"
] | |
990b85fea581c3710a827f71f87d0f2bc9447d5f
|
ef54d37f8a3303013ca7469871a320d303957ed7
|
/robo4.2/4.2/lib/python2.7/site-packages/robot/libraries/dialogs_py.py
|
252b6948049aa0b01ab2f54bdf4e68c57aabb39a
|
[] |
no_license
|
richa92/Jenkin_Regression_Testing
|
d18badfcf16bda682dfe7bcbbd66f54a9a27a58d
|
24a74926170cbdfafa47e972644e2fe5b627d8ff
|
refs/heads/master
| 2020-07-12T10:01:59.099137 | 2019-08-27T12:14:53 | 2019-08-27T12:14:53 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,703 |
py
|
# Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from threading import currentThread
import time
try:
    from Tkinter import (Button, Entry, Frame, Label, Listbox, TclError,
                         Toplevel, Tk, BOTH, END, LEFT, W)
except ImportError:
    from tkinter import (Button, Entry, Frame, Label, Listbox, TclError,
                         Toplevel, Tk, BOTH, END, LEFT, W)
class _TkDialog(Toplevel):
_left_button = 'OK'
_right_button = 'Cancel'
def __init__(self, message, value=None, **extra):
self._prevent_execution_with_timeouts()
self._parent = self._get_parent()
Toplevel.__init__(self, self._parent)
self._initialize_dialog()
self._create_body(message, value, **extra)
self._create_buttons()
self._result = None
def _prevent_execution_with_timeouts(self):
if 'linux' not in sys.platform \
and currentThread().getName() != 'MainThread':
raise RuntimeError('Dialogs library is not supported with '
'timeouts on Python on this platform.')
def _get_parent(self):
parent = Tk()
parent.withdraw()
return parent
def _initialize_dialog(self):
self.title('Robot Framework')
self.grab_set()
self.protocol("WM_DELETE_WINDOW", self._close)
self.bind("<Escape>", self._close)
self.minsize(250, 80)
self.geometry("+%d+%d" % self._get_center_location())
self._bring_to_front()
def grab_set(self, timeout=30):
maxtime = time.time() + timeout
while time.time() < maxtime:
try:
                # Fails at least on Linux if the mouse button is held down.
return Toplevel.grab_set(self)
except TclError:
pass
raise RuntimeError('Failed to open dialog in %s seconds. One possible '
'reason is holding down mouse button.' % timeout)
def _get_center_location(self):
x = (self.winfo_screenwidth() - self.winfo_reqwidth()) // 2
y = (self.winfo_screenheight() - self.winfo_reqheight()) // 2
return x, y
def _bring_to_front(self):
self.attributes('-topmost', True)
self.attributes('-topmost', False)
def _create_body(self, message, value, **extra):
frame = Frame(self)
Label(frame, text=message, anchor=W, justify=LEFT, wraplength=800).pack(fill=BOTH)
selector = self._create_selector(frame, value, **extra)
if selector:
selector.pack(fill=BOTH)
selector.focus_set()
frame.pack(padx=5, pady=5, expand=1, fill=BOTH)
def _create_selector(self, frame, value):
return None
def _create_buttons(self):
frame = Frame(self)
self._create_button(frame, self._left_button,
self._left_button_clicked)
self._create_button(frame, self._right_button,
self._right_button_clicked)
frame.pack()
def _create_button(self, parent, label, callback):
if label:
button = Button(parent, text=label, width=10, command=callback)
button.pack(side=LEFT, padx=5, pady=5)
def _left_button_clicked(self, event=None):
if self._validate_value():
self._result = self._get_value()
self._close()
def _validate_value(self):
return True
def _get_value(self):
return None
def _close(self, event=None):
# self.destroy() is not enough on Linux
self._parent.destroy()
def _right_button_clicked(self, event=None):
self._result = self._get_right_button_value()
self._close()
def _get_right_button_value(self):
return None
def show(self):
self.wait_window(self)
return self._result
class MessageDialog(_TkDialog):
_right_button = None
class InputDialog(_TkDialog):
def __init__(self, message, default='', hidden=False):
_TkDialog.__init__(self, message, default, hidden=hidden)
def _create_selector(self, parent, default, hidden):
self._entry = Entry(parent, show='*' if hidden else '')
self._entry.insert(0, default)
self._entry.select_range(0, END)
return self._entry
def _get_value(self):
return self._entry.get()
class SelectionDialog(_TkDialog):
def __init__(self, message, values):
_TkDialog.__init__(self, message, values)
def _create_selector(self, parent, values):
self._listbox = Listbox(parent)
for item in values:
self._listbox.insert(END, item)
self._listbox.config(width=0)
return self._listbox
def _validate_value(self):
return bool(self._listbox.curselection())
def _get_value(self):
return self._listbox.get(self._listbox.curselection())
class PassFailDialog(_TkDialog):
_left_button = 'PASS'
_right_button = 'FAIL'
def _get_value(self):
return True
def _get_right_button_value(self):
return False
|
[
"[email protected]"
] | |
35dfe86db227e3ebcc4020419a9b458da4804d07
|
7bededcada9271d92f34da6dae7088f3faf61c02
|
/pypureclient/flashblade/FB_2_8/models/quota_setting_response.py
|
911d109050831d4426d6f40b7851420fcdcd0f2a
|
[
"BSD-2-Clause"
] |
permissive
|
PureStorage-OpenConnect/py-pure-client
|
a5348c6a153f8c809d6e3cf734d95d6946c5f659
|
7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e
|
refs/heads/master
| 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 |
BSD-2-Clause
| 2023-09-08T09:08:30 | 2018-12-04T17:02:51 |
Python
|
UTF-8
|
Python
| false | false | 3,181 |
py
|
# coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.8, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_8 import models
class QuotaSettingResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'items': 'list[QuotaSetting]'
}
attribute_map = {
'items': 'items'
}
required_args = {
}
def __init__(
self,
items=None, # type: List[models.QuotaSetting]
):
"""
Keyword args:
items (list[QuotaSetting]): A list of quota settings objects.
"""
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `QuotaSettingResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
return None
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(QuotaSettingResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, QuotaSettingResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"[email protected]"
] | |
605cceafb7745578da0e5040c7db03dccc5e5ffc
|
c5b69745b12ad36241fa792af44480eb70918cb0
|
/ibis/tests/expr/test_literal.py
|
d43226d69d94263d4b54d263c8d01711f06d49e2
|
[
"Apache-2.0"
] |
permissive
|
vishalbelsare/ibis
|
bdbfde79086b268f4592cde009e0ffa52ece97e6
|
3fd6afb223fa442ccd0d9db69a74a431d5e7bcca
|
refs/heads/master
| 2023-08-16T16:23:00.535467 | 2021-11-23T15:33:35 | 2021-11-23T15:33:35 | 141,067,229 | 0 | 0 |
Apache-2.0
| 2021-11-29T20:12:43 | 2018-07-16T00:06:27 |
Python
|
UTF-8
|
Python
| false | false | 1,388 |
py
|
import ibis
from ibis.expr import datatypes
from ibis.expr.operations import Literal
from ibis.tests.util import assert_pickle_roundtrip
def test_literal_equality_basic():
a = ibis.literal(1).op()
b = ibis.literal(1).op()
assert a == b
assert hash(a) == hash(b)
def test_literal_equality_int_float():
# Note: This is different from the Python behavior for int/float comparison
a = ibis.literal(1).op()
b = ibis.literal(1.0).op()
assert a != b
def test_literal_equality_int16_int32():
    # Note: literals with the same value but different integer types are not equal
a = Literal(1, datatypes.int16)
b = Literal(1, datatypes.int32)
assert a != b
def test_literal_equality_int_interval():
a = ibis.literal(1).op()
b = ibis.interval(seconds=1).op()
assert a != b
def test_literal_equality_interval():
a = ibis.interval(seconds=1).op()
b = ibis.interval(minutes=1).op()
assert a != b
    # Currently these don't compare equal, but perhaps they should?
c = ibis.interval(seconds=60).op()
d = ibis.interval(minutes=1).op()
assert c != d
def test_pickle_literal():
a = Literal(1, datatypes.int16)
b = Literal(1, datatypes.int32)
assert_pickle_roundtrip(a)
assert_pickle_roundtrip(b)
def test_pickle_literal_interval():
a = ibis.interval(seconds=1).op()
assert_pickle_roundtrip(a)
|
[
"[email protected]"
] | |
ae33e5e64e5edcb987ff8edd262f7a45e2a61f7b
|
48c4dda8fbecb5bc9506eb0a318508c9a9f37aca
|
/deep learning from scratch.py
|
e10c1b2aa591751efd915dc08f92debac8407696
|
[] |
no_license
|
bigeyesung/DLkaggle
|
f59e8e2fdac430fd5e97cfc67e63c837a8b12cee
|
f57b10740b206ecff1bcbfdc7d4436ac8dcac28d
|
refs/heads/master
| 2023-07-05T22:16:03.042595 | 2021-08-07T15:48:54 | 2021-08-07T15:48:54 | 262,594,538 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,766 |
py
|
import numpy as np
from sklearn.model_selection import train_test_split
from tensorflow import keras
img_rows, img_cols = 28, 28
num_classes = 10
def prep_data(raw):
y = raw[:, 0]
out_y = keras.utils.to_categorical(y, num_classes)
x = raw[:,1:]
num_images = raw.shape[0]
out_x = x.reshape(num_images, img_rows, img_cols, 1)
out_x = out_x / 255
return out_x, out_y
fashion_file = "../input/fashionmnist/fashion-mnist_train.csv"
fashion_data = np.loadtxt(fashion_file, skiprows=1, delimiter=',')
x, y = prep_data(fashion_data)
# Set up code checking
from learntools.core import binder
binder.bind(globals())
from learntools.deep_learning.exercise_7 import *
print("Setup Complete")
# 1) Start the model
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Conv2D
fashion_model= Sequential()
# Add the first layer
fashion_model.add(Conv2D(12,kernel_size=3,activation='relu',input_shape=(img_rows,img_cols,1)))
#3) Add the remaining layers
fashion_model.add(Conv2D(20,kernel_size=3,activation='relu'))
fashion_model.add(Conv2D(20,kernel_size=3,activation='relu'))
fashion_model.add(Flatten())
fashion_model.add(Dense(100,activation='relu'))
fashion_model.add(Dense(num_classes, activation='softmax'))
# 4) Compile Your Model
fashion_model.compile(loss=keras.losses.categorical_crossentropy,optimizer='adam',metrics=['accuracy'])
# 5) Fit The Model
fashion_model.fit(x,y,batch_size=100,epochs=4,validation_split=0.2)
# 6) Create A New Model
second_fashion_model = Sequential()
second_fashion_model.add(Conv2D(12,
activation='relu',
kernel_size=3,
input_shape = (img_rows, img_cols, 1)))
# Changed kernel sizes to be 2
second_fashion_model.add(Conv2D(20, activation='relu', kernel_size=2))
second_fashion_model.add(Conv2D(20, activation='relu', kernel_size=2))
# added an addition Conv2D layer
second_fashion_model.add(Conv2D(20, activation='relu', kernel_size=2))
second_fashion_model.add(Flatten())
second_fashion_model.add(Dense(100, activation='relu'))
# It is important not to change the last layer. First argument matches number of classes. Softmax guarantees we get reasonable probabilities
second_fashion_model.add(Dense(10, activation='softmax'))
second_fashion_model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
second_fashion_model.fit(x, y, batch_size=100, epochs=4, validation_split=0.2)
#second_fashion_model.add(Conv2D(30,kernel_size=3,activation='relu',input_shape=(img_rows,img_cols,1)))
#second_fashion_model.fit(x,y,batch_size=100,epochs=4,validation_split=0.2)
|
[
"[email protected]"
] | |
9cdfc43db870a09854c65404a963963d2cb4b43d
|
bbf744bfbfd9a935bd98c7cf54152a5d41194161
|
/chapter_15/die_visual.py
|
d9629d134497d4af77867b78e009e95a6471a52b
|
[] |
no_license
|
terranigmark/python-crash-course-projects
|
65a7863be2d26fe8b91ac452b12203386eb0259a
|
79ed9ed8e6a1bf015990a9556689379274231d13
|
refs/heads/master
| 2022-12-05T21:59:00.352140 | 2020-08-21T04:59:50 | 2020-08-21T04:59:50 | 266,263,493 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 918 |
py
|
from plotly.graph_objs import Bar, Layout
from plotly import offline
from die import Die
# create a D6
die_1 = Die()
die_2 = Die(10)
# make some rolls and store results in a list
results = []
for roll_num in range(50_000):
result = die_1.roll() + die_2.roll()
results.append(result)
# analyze the results
frequencies = []
max_result = die_1.num_sides + die_2.num_sides
for value in range(2, max_result + 1):
frequency = results.count(value)
frequencies.append(frequency)
# visualize the results
x_values = list(range(2, max_result + 1))
data = [Bar(x = x_values, y = frequencies)]
x_axis_config = {'title': 'Result', 'dtick': 1}
y_axis_config = {'title': 'Frequency of Result'}
my_layout = Layout(title = 'Results of rolling two D6 and D10 50,000 times', xaxis = x_axis_config, yaxis = y_axis_config)
offline.plot({'data': data, 'layout': my_layout}, filename = 'd6_d10.html')
|
[
"[email protected]"
] | |
cc0f68e8359cb95579b7d20bc6c3581cdc712cbd
|
5e5e99e8493fbef64847494caf059c910c03c823
|
/arrays/palindromic-substrings.py
|
b398b82336fa0371d58df3ab24c16dec63daf978
|
[] |
no_license
|
jcockbain/leetcode-python
|
f4e487b13ae4cacef9cbedfd4358f8ee0006e2b8
|
d7f83ea5a11e4c8340c48698d29aa3bc0b942121
|
refs/heads/master
| 2020-07-09T19:58:42.933881 | 2019-10-28T23:34:34 | 2019-10-28T23:34:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 362 |
py
|
class Solution(object):
    def countSubstrings(self, s):
        """
        :type s: str
        :rtype: int
        """
        N = len(s)
        ans = 0
        for center in range(2 * N - 1):
            left = center // 2
            right = left + center % 2
            while left >= 0 and right < N \
                    and s[left] == s[right]:
                ans += 1
                left -= 1
                right += 1
        return ans
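# A minimal check with illustrative inputs: "abc" has 3 palindromic
# substrings, "aaa" has 6 ("a" x3, "aa" x2, "aaa").
if __name__ == "__main__":
    assert Solution().countSubstrings("abc") == 3
    assert Solution().countSubstrings("aaa") == 6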
|
[
"[email protected]"
] | |
0ba3ed98a522196a66863cdd0ce816654065b1b2
|
96a34a048c783a75736bf0ec775df22142f9ee53
|
/services/web/server/src/simcore_service_webserver/db_listener/plugin.py
|
a4fda5b69bdff33c6386eee2e702f5c74e8bbb01
|
[
"MIT"
] |
permissive
|
ITISFoundation/osparc-simcore
|
77e5b9f7eb549c907f6ba2abb14862154cc7bb66
|
f4c57ffc7b494ac06a2692cb5539d3acfd3d1d63
|
refs/heads/master
| 2023-08-31T17:39:48.466163 | 2023-08-31T15:03:56 | 2023-08-31T15:03:56 | 118,596,920 | 39 | 29 |
MIT
| 2023-09-14T20:23:09 | 2018-01-23T10:48:05 |
Python
|
UTF-8
|
Python
| false | false | 870 |
py
|
"""
computation module is the main entry-point for computational backend
"""
import logging
from aiohttp import web
from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup
from ..db.plugin import setup_db
from ..projects.db import setup_projects_db
from ..rabbitmq import setup_rabbitmq
from ..socketio.plugin import setup_socketio
from ._db_comp_tasks_listening_task import create_comp_tasks_listening_task
_logger = logging.getLogger(__name__)
@app_module_setup(
__name__,
ModuleCategory.ADDON,
settings_name="WEBSERVER_DB_LISTENER",
logger=_logger,
)
def setup_db_listener(app: web.Application):
setup_rabbitmq(app)
setup_socketio(app)
setup_projects_db(app)
# Creates a task to listen to comp_task pg-db's table events
setup_db(app)
app.cleanup_ctx.append(create_comp_tasks_listening_task)
|
[
"[email protected]"
] | |
2671fbfa345590729a83bef8261428be9a1bf018
|
f8d5c4eb0244c4a227a615bc11c4c797760c3bec
|
/utils/rldraw.py
|
2e944f936c3ad5ea6d074a6f0f9d74759cdd0c70
|
[] |
no_license
|
SamPlvs/reinforcement_learning_pytorch
|
e9b84659f870d938814177f1288fa4a2eb152599
|
ffb9e53eeff011c4d3d5933a60c2b65fdbb18e2a
|
refs/heads/master
| 2020-03-23T04:08:51.778325 | 2018-01-16T22:36:48 | 2018-01-16T22:36:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 475 |
py
|
import matplotlib.pyplot as plt
import numpy as np
def reward_episode(rewards, image_path, env_name='', method_name='', comment=''):
reward_list = rewards
total_num = np.shape(reward_list)[0]
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(list(range(total_num)), reward_list)
ax.set_xlabel('iteration')
ax.set_ylabel('rewards')
fig.suptitle("rewards_episodes_{}_{}_{}".format(env_name, method_name, comment))
fig.savefig(image_path)
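# A minimal usage sketch: the reward values, labels and output file name are
# illustrative only.
if __name__ == '__main__':
    demo_rewards = np.array([0.0, 0.5, 1.0, 0.8, 1.2, 1.5])
    reward_episode(demo_rewards, 'rewards_demo.png',
                   env_name='CartPole-v1', method_name='DQN', comment='demo')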
|
[
"[email protected]"
] | |
5a75b3e5fcce03f7bd10d309196f67bdbc85c252
|
1d641f71f7aab082ed0b3ee805d6ff24b012ca2d
|
/ecommerce/carts/urls.py
|
aacdcfc353ac76fe4c2a60b52d83aa8708090caa
|
[] |
no_license
|
Arkajit-m18/django-mca-major-project
|
3d63ac96cd32c49e9a95629a680c5b0b7561cbd3
|
59b6f39d923a7e134bbb4bbb769bc06721321760
|
refs/heads/master
| 2020-05-18T00:31:44.435948 | 2019-05-15T15:23:21 | 2019-05-15T15:23:21 | 184,065,280 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 334 |
py
|
from django.urls import path
from . import views
app_name = 'carts'
urlpatterns = [
path('', views.cart_home, name = 'cart_home'),
path('update/', views.cart_update, name = 'cart_update'),
path('checkout/', views.checkout_home, name = 'checkout'),
path('checkout/success/', views.checkout_done, name = 'success'),
]
|
[
"[email protected]"
] | |
ca0d04658eb03c43a7dceddf7338d8c1f5cd372f
|
346cf248e94fe97ba9c0a841827ab77f0ed1ff20
|
/experiments/kdd-exps/experiment_DynaQtable_130_Feb14_0029.py
|
efabd8516978796f715bed1b20adcd12deaf5f2b
|
[
"BSD-3-Clause"
] |
permissive
|
huangxf14/deepnap
|
cae9c7c654223f6202df05b3c3bc5053f9bf5696
|
b4627ce1b9022d4f946d9b98d8d1622965cb7968
|
refs/heads/master
| 2020-03-26T02:54:01.352883 | 2018-08-12T01:55:14 | 2018-08-12T01:55:14 | 144,429,728 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,623 |
py
|
# System built-in modules
import time
from datetime import datetime
import sys
import os
from multiprocessing import Pool
# Project dependency modules
import pandas as pd
pd.set_option('mode.chained_assignment', None) # block warnings due to DataFrame value assignment
import lasagne
# Project modules
sys.path.append('../')
from sleep_control.traffic_emulator import TrafficEmulator
from sleep_control.traffic_server import TrafficServer
from sleep_control.controller import QController, DummyController, NController
from sleep_control.integration import Emulation
from sleep_control.env_models import SJTUModel
from rl.qtable import QAgent
from rl.qnn_theano import QAgentNN
from rl.mixin import PhiMixin, DynaMixin
sys_stdout = sys.stdout
log_prefix = '_'.join(['msg'] + os.path.basename(__file__).replace('.', '_').split('_')[1:5])
log_file_name = "{}_{}.log".format(log_prefix, sys.argv[1])
# Composite classes
class Dyna_QAgent(DynaMixin, QAgent):
def __init__(self, **kwargs):
super(Dyna_QAgent, self).__init__(**kwargs)
# Parameters
# |- Data
location = 'gym'
# |- Agent
# |- QAgent
actions = [(True, None), (False, 'serve_all')]
gamma, alpha = 0.9, 0.9 # TD backup
explore_strategy, epsilon = 'epsilon', 0.02 # exploration
# |- QAgentNN
# | - No Phi
phi_length = 0
dim_state = (1, 1, 3)
range_state = ((((0, 10), (0, 10), (0, 10)),),)
# | - Other params
momentum, learning_rate = 0.9, 0.01 # SGD
num_buffer, memory_size, batch_size, update_period, freeze_period = 2, 200, 100, 4, 16
reward_scaling, reward_scaling_update, rs_period = 1, 'adaptive', 32 # reward scaling
# |- Env model
model_type, traffic_window_size = 'IPP', 50
stride, n_iter, adjust_offset = 2, 3, 1e-22
eval_period, eval_len = 4, 100
n_belief_bins, max_queue_len = 5, 20
Rs, Rw, Rf, Co, Cw = 1.0, -1.0, -10.0, -5.0, -0.5
traffic_params = (model_type, traffic_window_size,
stride, n_iter, adjust_offset,
eval_period, eval_len,
n_belief_bins)
queue_params = (max_queue_len,)
beta = 0.5 # R = (1-beta)*ServiceReward + beta*Cost
reward_params = (Rs, Rw, Rf, Co, Cw, beta)
# |- DynaQ
num_sim = 5
# |- Env
# |- Time
start_time = pd.to_datetime("2014-10-15 09:40:00")
total_time = pd.Timedelta(days=7)
time_step = pd.Timedelta(seconds=2)
backoff_epochs = num_buffer*memory_size+phi_length
head_datetime = start_time - time_step*backoff_epochs
tail_datetime = head_datetime + total_time
TOTAL_EPOCHS = int(total_time/time_step)
# |- Reward
rewarding = {'serve': Rs, 'wait': Rw, 'fail': Rf}
# load from processed data
session_df =pd.read_csv(
filepath_or_buffer='../data/trace_{}.dat'.format(location),
parse_dates=['startTime_datetime', 'endTime_datetime']
)
te = TrafficEmulator(
session_df=session_df, time_step=time_step,
head_datetime=head_datetime, tail_datetime=tail_datetime,
rewarding=rewarding,
verbose=2)
ts = TrafficServer(cost=(Co, Cw), verbose=2)
env_model = SJTUModel(traffic_params, queue_params, reward_params, 2)
agent = Dyna_QAgent(
env_model=env_model, num_sim=num_sim,
# Below is QAgent params
actions=actions, alpha=alpha, gamma=gamma,
explore_strategy=explore_strategy, epsilon=epsilon,
verbose=2)
c = QController(agent=agent)
emu = Emulation(te=te, ts=ts, c=c, beta=beta)
# Heavyliftings
t = time.time()
sys.stdout = sys_stdout
log_path = './log/'
if os.path.isfile(log_path+log_file_name):
print "Log file {} already exist. Experiment cancelled.".format(log_file_name)
else:
log_file = open(log_path+log_file_name,"w")
print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
print log_file_name
time.sleep(1)
sys.stdout = log_file
while emu.epoch is not None and emu.epoch<TOTAL_EPOCHS:
# log time
print "Epoch {},".format(emu.epoch),
left = emu.te.head_datetime + emu.te.epoch*emu.te.time_step
right = left + emu.te.time_step
print "{} - {}".format(left.strftime("%Y-%m-%d %H:%M:%S"), right.strftime("%Y-%m-%d %H:%M:%S"))
emu.step()
print
if emu.epoch%(0.05*TOTAL_EPOCHS)==0:
sys.stdout = sys_stdout
print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
print log_file_name
time.sleep(1)
sys.stdout = log_file
sys.stdout = sys_stdout
log_file.close()
print
print log_file_name,
print '{:.3f} sec,'.format(time.time()-t),
print '{:.3f} min'.format((time.time()-t)/60)
|
[
"[email protected]"
] | |
6fdc3db5b428914f4813bf4199befece5ed7563e
|
df4a7c46c46d1eca6570493b9707bdf64e54f8d3
|
/py/209.minimum-size-subarray-sum.py
|
adaf3f0e6093c8efaad3d2fbdcb5fae7fb66b2a1
|
[] |
no_license
|
CharmSun/my-leetcode
|
52a39bf719c507fb7032ed424fe857ba7340aea3
|
5325a56ba8c40d74d9fef2b19bac63a4e2c44a38
|
refs/heads/master
| 2023-03-29T06:39:49.614264 | 2021-03-28T16:33:52 | 2021-03-28T16:33:52 | 261,364,001 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 790 |
py
|
#
# @lc app=leetcode id=209 lang=python3
#
# [209] Minimum Size Subarray Sum
#
# @lc code=start
from typing import List
class Solution:
    # Two-pointer / sliding-window approach
def minSubArrayLen(self, target: int, nums: List[int]) -> int:
if not nums:
return 0
left = 0
right = -1
sum = 0
length = len(nums) + 1
while left < len(nums) and right < len(nums):
if right < len(nums) - 1 and sum < target:
right += 1
sum += nums[right]
else:
sum -= nums[left]
left += 1
if sum >= target:
length = min(length, right - left + 1)
if length == len(nums) + 1:
return 0
return length
# @lc code=end
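# A minimal check using the example from the problem statement:
# target=7, nums=[2,3,1,2,4,3] -> 2 (the subarray [4,3]).
if __name__ == "__main__":
    assert Solution().minSubArrayLen(7, [2, 3, 1, 2, 4, 3]) == 2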
|
[
"[email protected]"
] | |
09228ae64537dd9fb78fcabb808a96dacec36126
|
2ab391bfaadf0743da8ffee084896b999e88482d
|
/wx.py
|
a2bd1358136ac0530889f2fe820be14236fd42ec
|
[] |
no_license
|
wean/coupon-windows
|
552a59637ea45539bdfa70c6d1bd04626f0fdbd0
|
9565b23c7f44594f182d7a268d4ed45bdeaf8dd3
|
refs/heads/master
| 2020-04-05T07:11:43.024665 | 2017-11-24T08:23:50 | 2017-11-24T08:23:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,676 |
py
|
# -*- coding:utf-8 -*-
import random
import itchat
import time
from schedule import Schedule
from search import SearchingKeyRegex
from special import Searcher
from utils import getProperty, randomSleep, reprDict
class WX(Schedule):
def __init__(self, configFile):
Schedule.__init__(self, configFile)
self.searcher = Searcher(configFile)
self.configFile = configFile
def login(self, exitCallback, uuid=None):
def isLoginned(uuid):
for count in range(10):
status = int(itchat.check_login(uuid))
                if status == 200:
                    return True
                if status == 201:
                    print 'Waiting for confirmation on mobile #', count
randomSleep(1, 2)
continue
print 'Error status:', status
return False
return False
if uuid is None:
statusFile = getProperty(self.configFile, 'wechat-status-file')
itchat.auto_login(hotReload=True, statusStorageDir=statusFile)
else:
if not isLoginned(uuid):
raise Exception('Failed to login with {}'.format(uuid))
userInfo = itchat.web_init()
itchat.show_mobile_login()
itchat.get_friends(True)
itchat.start_receiving(exitCallback)
self.me = itchat.search_friends()
print self.me['NickName'], 'is working'
self.watchFriends = list()
names = getProperty(self.configFile, 'wechat-watch-friends').split(';')
for name in names:
friends = itchat.search_friends(name=name)
self.watchFriends.extend(friends)
self.watchGroups = list()
names = getProperty(self.configFile, 'wechat-watch-groups').split(';')
for name in names:
groups = itchat.search_chatrooms(name=name)
self.watchGroups.extend(groups)
self.searchReplyPlate = getProperty(self.configFile, 'search-reply-plate')
itchat.run(blockThread=False) # Run in a new thread
self.run()
@staticmethod
def sendTo(obj, plate=None, image=None):
print '================================================================'
print 'Send a message to', obj['NickName']
if plate is not None:
interval = random.random() * 10
time.sleep(interval)
ret = obj.send(plate)
print 'Result of text message:', ret['BaseResponse']['ErrMsg']
print '----------------------------------------------------------------'
print plate
print '----------------------------------------------------------------'
if image is not None:
interval = random.random() * 10
time.sleep(interval)
ret = obj.send_image(image)
print 'Result of', image, ':', ret['BaseResponse']['ErrMsg']
print '================================================================'
def text(self, msg):
for friend in self.watchFriends:
if msg['FromUserName'] == friend['UserName']:
break
else:
return
print '================================================================'
print msg['User']['NickName'], 'sends a message:'
print '----------------------------------------------------------------'
print msg['Content']
print '================================================================'
self.search(friend, msg['Content'])
def textGroup(self, msg):
for friend in self.watchGroups:
if msg['FromUserName'] == friend['UserName']:
break
else:
return
print '================================================================'
print msg['User']['NickName'], 'sends a message:'
print '----------------------------------------------------------------'
print msg['Content']
print '================================================================'
self.search(friend, msg['Content'])
def send(self, plate, image):
for friend in self.watchFriends:
WX.sendTo(friend, plate, image)
def search(self, friend, content):
content = SearchingKeyRegex.parse(content)
if content is None:
return
print 'Searching', content
WX.sendTo(friend, self.searchReplyPlate.format(content.replace('#', ' ')))
if not self.searcher.search(content):
return
WX.sendTo(friend, self.searcher.plate, self.searcher.image)
|
[
"[email protected]"
] | |
beead89528382b978348836d26fab1b78be43800
|
26e4bea46942b9afa5a00b9cde9a84f2cc58e3c9
|
/pygame/Astar/implementation.py
|
4965fc01f99a6ab2206ed2468d00869b3bb21107
|
[] |
no_license
|
MeetLuck/works
|
46da692138cb9741a913d84eff6822f107510dc7
|
ab61175bb7e2ed5c5113bf150e0541ae18eb04c4
|
refs/heads/master
| 2020-04-12T05:40:25.143075 | 2017-08-21T17:01:06 | 2017-08-21T17:01:06 | 62,373,576 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,781 |
py
|
# Sample code from http://www.redblobgames.com/pathfinding/
# Copyright 2014 Red Blob Games <[email protected]>
#
# Feel free to use this code in your own projects, including commercial projects
# License: Apache v2.0 <http://www.apache.org/licenses/LICENSE-2.0.html>
from __future__ import print_function
class SimpleGraph:
def __init__(self):
self.edges = {}
def neighbors(self, id):
return self.edges[id]
example_graph = SimpleGraph()
example_graph.edges = {
'A': ['B'],
'B': ['A', 'C', 'D'],
'C': ['A'],
'D': ['E', 'A'],
'E': ['B']
}
import collections
class Queue:
def __init__(self):
self.elements = collections.deque()
def empty(self):
return len(self.elements) == 0
def put(self, x):
self.elements.append(x)
def get(self):
return self.elements.popleft()
# utility functions for dealing with square grids
def from_id_width(id, width):
return (id % width, id // width)
def draw_tile(graph, id, style, width):
r = "."
if 'number' in style and id in style['number']: r = "%d" % style['number'][id]
if 'point_to' in style and style['point_to'].get(id, None) is not None:
(x1, y1) = id
(x2, y2) = style['point_to'][id]
if x2 == x1 + 1: r = "\u2192"
if x2 == x1 - 1: r = "\u2190"
if y2 == y1 + 1: r = "\u2193"
if y2 == y1 - 1: r = "\u2191"
if 'start' in style and id == style['start']: r = "A"
if 'goal' in style and id == style['goal']: r = "Z"
if 'path' in style and id in style['path']: r = "@"
if id in graph.walls: r = "#" * width
return r
def draw_grid(graph, width=2, **style):
for y in range(graph.height):
for x in range(graph.width):
print("%%-%ds" % width % draw_tile(graph, (x, y), style, width), end="")
print()
# data from main article
DIAGRAM1_WALLS = [from_id_width(id, width=30) for id in [21,22,51,52,81,82,93,94,111,112,123,124,133,134,141,142,153,154,163,164,171,172,173,174,175,183,184,193,194,201,202,203,204,205,213,214,223,224,243,244,253,254,273,274,283,284,303,304,313,314,333,334,343,344,373,374,403,404,433,434]]
class SquareGrid:
def __init__(self, width, height):
self.width = width
self.height = height
self.walls = []
def in_bounds(self, id):
(x, y) = id
return 0 <= x < self.width and 0 <= y < self.height
def passable(self, id):
return id not in self.walls
def neighbors(self, id):
(x, y) = id
results = [(x+1, y), (x, y-1), (x-1, y), (x, y+1)]
if (x + y) % 2 == 0: results.reverse() # aesthetics
results = filter(self.in_bounds, results)
results = filter(self.passable, results)
return results
class GridWithWeights(SquareGrid):
def __init__(self, width, height):
SquareGrid.__init__(self,width, height)
self.weights = {}
def cost(self, from_node, to_node):
return self.weights.get(to_node, 1)
diagram4 = GridWithWeights(10, 10)
diagram4.walls = [(1, 7), (1, 8), (2, 7), (2, 8), (3, 7), (3, 8)]
diagram4.weights = {loc: 5 for loc in [(3, 4), (3, 5), (4, 1), (4, 2),
(4, 3), (4, 4), (4, 5), (4, 6),
(4, 7), (4, 8), (5, 1), (5, 2),
(5, 3), (5, 4), (5, 5), (5, 6),
(5, 7), (5, 8), (6, 2), (6, 3),
(6, 4), (6, 5), (6, 6), (6, 7),
(7, 3), (7, 4), (7, 5)]}
import heapq
class PriorityQueue:
def __init__(self):
self.elements = []
def empty(self):
return len(self.elements) == 0
def put(self, item, priority):
heapq.heappush(self.elements, (priority, item))
def get(self):
return heapq.heappop(self.elements)[1]
def dijkstra_search(graph, start, goal):
frontier = PriorityQueue()
frontier.put(start, 0)
came_from = {}
cost_so_far = {}
came_from[start] = None
cost_so_far[start] = 0
while not frontier.empty():
current = frontier.get()
if current == goal:
break
for next in graph.neighbors(current):
new_cost = cost_so_far[current] + graph.cost(current, next)
if next not in cost_so_far or new_cost < cost_so_far[next]:
cost_so_far[next] = new_cost
priority = new_cost
frontier.put(next, priority)
came_from[next] = current
return came_from, cost_so_far
def reconstruct_path(came_from, start, goal):
    current = goal
    path = []
    while current != start:
        path.append(current)
        current = came_from[current]
    path.append(start) # optional
    path.reverse() # optional
    return path
def heuristic(a, b):
(x1, y1) = a
(x2, y2) = b
return abs(x1 - x2) + abs(y1 - y2)
def a_star_search(graph, start, goal):
frontier = PriorityQueue()
frontier.put(start, 0)
came_from = {}
cost_so_far = {}
came_from[start] = None
cost_so_far[start] = 0
while not frontier.empty():
current = frontier.get()
if current == goal:
break
for next in graph.neighbors(current):
new_cost = cost_so_far[current] + graph.cost(current, next)
if next not in cost_so_far or new_cost < cost_so_far[next]:
cost_so_far[next] = new_cost
priority = new_cost + heuristic(goal, next)
frontier.put(next, priority)
came_from[next] = current
return came_from, cost_so_far
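# A minimal usage sketch: runs A* on the weighted grid defined above;
# the start and goal cells are illustrative.
if __name__ == '__main__':
    start, goal = (1, 4), (7, 8)
    came_from, cost_so_far = a_star_search(diagram4, start, goal)
    draw_grid(diagram4, width=3, point_to=came_from, start=start, goal=goal)
    print()
    draw_grid(diagram4, width=3, path=reconstruct_path(came_from, start, goal))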
|
[
"[email protected]"
] | |
bbbb9c609651e91e3a3c15c139ff1b5813c22879
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startPyquil2333.py
|
c971dffe1465e621fa1a309de3e74ac9949af7f2
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,698 |
py
|
# qubit number=4
# total number=29
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += CNOT(0,3) # number=14
prog += X(3) # number=15
prog += RX(1.8001325905069514,3) # number=18
prog += CNOT(0,3) # number=16
prog += H(1) # number=22
prog += H(1) # number=2
prog += H(2) # number=3
prog += H(3) # number=4
prog += H(0) # number=5
prog += X(3) # number=24
prog += H(1) # number=6
prog += X(1) # number=25
prog += H(2) # number=7
prog += H(3) # number=8
prog += CNOT(1,0) # number=26
prog += Z(1) # number=27
prog += CNOT(1,0) # number=28
prog += H(0) # number=9
prog += CNOT(2,0) # number=10
prog += X(1) # number=17
prog += CNOT(2,0) # number=11
prog += Y(0) # number=12
prog += Y(0) # number=13
prog += CNOT(2,1) # number=23
prog += X(0) # number=19
prog += X(0) # number=20
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('4q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil2333.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
|
[
"[email protected]"
] | |
fbddef4b9d48e173fddbe92424567a8926db63a3
|
94c7440e7f1d2fdbe4a1e26b9c75a94e49c14eb4
|
/leetcode/303.py
|
3e11c0d02ea85837838c1abfd9fcbb8f9d209292
|
[
"Apache-2.0"
] |
permissive
|
windniw/just-for-fun
|
7ddea4f75cf3466a400b46efe36e57f6f7847c48
|
44e1ff60f8cfaf47e4d88988ee67808f0ecfe828
|
refs/heads/master
| 2022-08-18T09:29:57.944846 | 2022-07-25T16:04:47 | 2022-07-25T16:04:47 | 204,949,602 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 432 |
py
|
"""
link: https://leetcode.com/problems/range-sum-query-immutable
problem: offline range-sum queries over an array
solution: store prefix sums sum[:i]
"""
from typing import List  # List is pre-imported by the LeetCode runner; added here for standalone use
class NumArray:
def __init__(self, nums: List[int]):
self.s = [0 for _ in range(len(nums) + 1)]
for i in range(1, len(nums) + 1):
self.s[i] = self.s[i - 1] + nums[i - 1]
def sumRange(self, i: int, j: int) -> int:
return self.s[j + 1] - self.s[i]
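if __name__ == "__main__":
    # Editor's sanity check (illustrative only; LeetCode supplies the real driver):
    arr = NumArray([-2, 0, 3, -5, 2, -1])
    print(arr.sumRange(0, 2))  # expected 1
    print(arr.sumRange(2, 5))  # expected -1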
|
[
"[email protected]"
] | |
150ada0104f487967baa8037bdf9800d1d660c71
|
d10c5d3603e027a8fd37115be05e62634ec0f0a5
|
/13_Machine-Learning-with-Tree-Based-Models-in-Python/13_ex_1-12.py
|
8bc8ee02a70ea444f217bbab5bc0d3c2c3a249c6
|
[] |
no_license
|
stacygo/2021-01_UCD-SCinDAE-EXS
|
820049125b18b38ada49ffc2036eab33431d5740
|
027dc2d2878314fc8c9b2796f0c2e4c781c6668d
|
refs/heads/master
| 2023-04-29T01:44:36.942448 | 2021-05-23T15:29:28 | 2021-05-23T15:29:28 | 335,356,448 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,032 |
py
|
# Exercise 1-12: Linear regression vs regression tree
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_squared_error as MSE
SEED = 3
df = pd.read_csv('input/auto.csv')
y = df['mpg']
X = pd.get_dummies(df.drop(['mpg'], axis=1))
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=SEED)
dt = DecisionTreeRegressor(max_depth=8, min_samples_leaf=0.13, random_state=SEED)
dt.fit(X_train, y_train)
y_pred = dt.predict(X_test)
mse_dt = MSE(y_test, y_pred)
rmse_dt = mse_dt**(1/2)
lr = LinearRegression()
lr.fit(X_train, y_train)
# Predict test set labels
y_pred_lr = lr.predict(X_test)
# Compute mse_lr
mse_lr = MSE(y_test, y_pred_lr)
# Compute rmse_lr
rmse_lr = mse_lr**(1/2)
# Print rmse_lr
print('Linear Regression test set RMSE: {:.2f}'.format(rmse_lr))
# Print rmse_dt
print('Regression Tree test set RMSE: {:.2f}'.format(rmse_dt))
|
[
"[email protected]"
] | |
f13dd503a9b25ec0cf197860872374891737e452
|
24c84c5b93cd816976d370a99982f45e0d18a184
|
/ArraysProblem/Python/FindAllNumbersDisappearedinAnArray.py
|
25420fb3ce55ce8bdb7c4beb3f9a49d0977405c8
|
[] |
no_license
|
purushottamkaushik/DataStructuresUsingPython
|
4ef1cf33f1af3fd25105a45be4f179069e327628
|
e016fe052c5600dcfbfcede986d173b401ed23fc
|
refs/heads/master
| 2023-03-12T13:25:18.186446 | 2021-02-28T18:21:37 | 2021-02-28T18:21:37 | 343,180,450 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 719 |
py
|
class Solution:
def findDisappearedNumbers(self, nums):
lst = []
if not nums:
return lst
m = max(nums)
for i in range(len(nums)):
print(i)
if i+1 in nums:
continue
else:
lst.append(i+1)
return lst
def findDisappearedNumbers2(self, nums):
s = set(nums)
n = len(nums) + 1
lst = []
for i in range(1,n):
if i not in s:
lst.append(i)
return lst
s = Solution().findDisappearedNumbers([1,1])
print(s)
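# Editor's note: the first method rescans `nums` for every index (O(n^2)), while
# findDisappearedNumbers2 builds a set first (O(n)). Both give the same answer:
print(Solution().findDisappearedNumbers2([4, 3, 2, 7, 8, 2, 3, 1]))  # [5, 6]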
|
[
"[email protected]"
] | |
ce59e45ee8cddd99cedd8e16aefcff92641a326a
|
8214e7369f2b86f19602eaffe9e8072f336391bb
|
/tasks.py
|
e4ab65a9167e0813e7287c98ba19959386973525
|
[
"BSD-3-Clause"
] |
permissive
|
pydev-git/cookiecutter_flask_docker
|
27dc47e69a957bd89aeb76db13cc0a08897cd467
|
0bbe0f366d0d8d914b02518c94f5ff75d03386b5
|
refs/heads/master
| 2021-06-01T11:30:30.912658 | 2016-07-27T08:05:18 | 2016-07-27T08:05:18 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,184 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Invoke tasks."""
import os
import json
import shutil
from invoke import task, run
HERE = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(HERE, 'cookiecutter.json'), 'r') as fp:
COOKIECUTTER_SETTINGS = json.load(fp)
# Match default value of app_name from cookiecutter.json
COOKIE = os.path.join(HERE, COOKIECUTTER_SETTINGS['app_name'])
REQUIREMENTS = os.path.join(COOKIE, 'requirements', 'dev.txt')
@task
def build():
"""Build the cookiecutter."""
run('cookiecutter {0} --no-input'.format(HERE))
@task
def clean():
"""Clean out generated cookiecutter."""
if os.path.exists(COOKIE):
shutil.rmtree(COOKIE)
print('Removed {0}'.format(COOKIE))
else:
print('App directory does not exist. Skipping.')
def _run_manage_command(command):
run('python {0} {1}'.format(os.path.join(COOKIE, 'manage.py'), command), echo=True)
@task(pre=[clean, build])
def test():
"""Run lint commands and tests."""
run('pip install -r {0} --ignore-installed'.format(REQUIREMENTS), echo=True)
os.chdir(COOKIE)
_run_manage_command('lint')
_run_manage_command('test')
|
[
"[email protected]"
] | |
e06215fdfb4e2456cf5f6f26ef24b108051d7371
|
cd9eb87e3e1b04e6f421377eff02514de05c98e2
|
/learn_SciPy/scikit-learn/User Guide/1. Supervised learning/1.10. Decision Trees.py
|
2e0907d0cc61331fa0146ca0c4f1677688f35028
|
[] |
no_license
|
zhaojinxi/learn_python
|
45f116f9729bbf19d9bb4a574b06e0ec41f754dc
|
07b4a5a231e39b6d2c28f98e99a3a8fe3cb534c4
|
refs/heads/master
| 2021-06-05T22:00:02.528023 | 2020-03-22T04:19:22 | 2020-03-22T04:19:22 | 129,857,802 | 4 | 3 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,079 |
py
|
import sklearn.tree
import sklearn.datasets
import graphviz
#1.10.1. Classification
X = [[0, 0], [1, 1]]
Y = [0, 1]
clf = sklearn.tree.DecisionTreeClassifier()
clf = clf.fit(X, Y)
clf.predict([[2., 2.]])
clf.predict_proba([[2., 2.]])
iris = sklearn.datasets.load_iris()
clf = sklearn.tree.DecisionTreeClassifier()
clf = clf.fit(iris.data, iris.target)
dot_data = sklearn.tree.export_graphviz(clf, out_file=None)
graph = graphviz.Source(dot_data)
graph.render("iris")
dot_data = sklearn.tree.export_graphviz(clf, out_file=None, feature_names=iris.feature_names, class_names=iris.target_names, filled=True, rounded=True, special_characters=True)
graph = graphviz.Source(dot_data)
graph
clf.predict(iris.data[:1, :])
clf.predict_proba(iris.data[:1, :])
#1.10.2. Regression
X = [[0, 0], [2, 2]]
y = [0.5, 2.5]
clf = sklearn.tree.DecisionTreeRegressor()
clf = clf.fit(X, y)
clf.predict([[1, 1]])
#1.10.3. Multi-output problems
#1.10.4. Complexity
#1.10.5. Tips on practical use
#1.10.6. Tree algorithms: ID3, C4.5, C5.0 and CART
#1.10.7. Mathematical formulation
|
[
"[email protected]"
] | |
ab37819178678efc8832a481c7d0f60c89cf7dfe
|
c27e78d35cdc802e4790280c384a0f97acf636ef
|
/src/rulesTest.py
|
c7cda2cbd1a74e52e447aefbc5576b0f6f3b5dc3
|
[] |
no_license
|
undersea/Special_Topic
|
99e424d9e443523a4d880ef478455bb75d7c82cd
|
7bf7ed2c92b864d99790b927965bad819bfb7cfb
|
refs/heads/master
| 2020-03-25T04:01:26.909441 | 2011-05-30T03:26:53 | 2011-05-30T03:26:53 | 3,587,506 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 766 |
py
|
from unittest import TestCase, main
from rules import Degree
class TestRules(TestCase):
def setUp(self):
print "setUp"
self.rules = Degree()
pass
def tearDown(self):
print "tearDown"
del self.rules
pass
def testAdd(self):
count = len(self.rules.rules)
rule = ("one of","one")
self.rules.add(rule)
self.assertEqual(count, 0)
self.assertEqual(len(self.rules.rules), 1)
def testDelete(self):
rule = ("one of","one")
self.rules.rules.append(rule)
count = len(self.rules.rules)
self.assertEqual(count, 1)
self.rules.delete(rule)
self.assertEqual(len(self.rules.rules), 0)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
4698bbd10d6f9865b9e14c4ccd5f0c59b5bd7996
|
8f506513cb73d9bdb5dbdd9084aaba020b1efbea
|
/Course_1-Algorithmic_Toolbox/Week-1/Excercise_Challenges/2_maximum_pairwise_product/max_pairwise_product.py
|
16ef706e59671dba4d782c766223be8cf322274f
|
[] |
no_license
|
KhanAjmal007/Data-Structures-and-Algorithms-Specialization-Coursera
|
1255ecf877ecd4a91bda8b85e9c96566fe6d5e4d
|
ab6e618c5d8077febb072091e80c16f5f1a15465
|
refs/heads/master
| 2023-03-21T04:18:04.580423 | 2020-07-11T07:18:06 | 2020-07-11T07:18:06 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 401 |
py
|
def max_pairwise_product(numbers):
max1 = -999
max2 = -9999
for value in numbers:
if value > max1:
max2 = max1
max1 = value
elif value > max2:
max2 = value
return max1 * max2
if __name__ == '__main__':
input_n = int(input())
input_numbers = [int(x) for x in input().split()]
print(max_pairwise_product(input_numbers))
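def stress_test(iterations=100):
    # Editor's sketch (not part of the original submission): cross-check the
    # linear scan against a sort-based reference on random non-negative inputs.
    import random
    for _ in range(iterations):
        nums = [random.randint(0, 10**5) for _ in range(random.randint(2, 10))]
        a, b = sorted(nums)[-2:]
        assert max_pairwise_product(nums) == a * b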
|
[
"[email protected]"
] | |
f2f4d6b715cc9b11ba5174b51906804ad1a1ca7e
|
544cfadc742536618168fc80a5bd81a35a5f2c99
|
/tools/external_updater/base_updater.py
|
18d4435858c7a22b295ca26455f4abbaf44d16d4
|
[] |
no_license
|
ZYHGOD-1/Aosp11
|
0400619993b559bf4380db2da0addfa9cccd698d
|
78a61ca023cbf1a0cecfef8b97df2b274ac3a988
|
refs/heads/main
| 2023-04-21T20:13:54.629813 | 2021-05-22T05:28:21 | 2021-05-22T05:28:21 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,590 |
py
|
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for all updaters."""
from pathlib import Path
import fileutils
# pylint: disable=import-error
import metadata_pb2 # type: ignore
class Updater:
"""Base Updater that defines methods common for all updaters."""
def __init__(self, proj_path: Path, old_url: metadata_pb2.URL,
old_ver: str) -> None:
self._proj_path = fileutils.get_absolute_project_path(proj_path)
self._old_url = old_url
self._old_ver = old_ver
self._new_url = metadata_pb2.URL()
self._new_url.CopyFrom(old_url)
self._new_ver = old_ver
self._has_errors = False
def is_supported_url(self) -> bool:
"""Returns whether the url is supported."""
raise NotImplementedError()
def check(self) -> None:
"""Checks whether a new version is available."""
raise NotImplementedError()
def update(self) -> None:
"""Updates the package.
Has to call check() before this function.
"""
raise NotImplementedError()
@property
def project_path(self) -> Path:
"""Gets absolute path to the project."""
return self._proj_path
@property
def current_version(self) -> str:
"""Gets the current version."""
return self._old_ver
@property
def current_url(self) -> metadata_pb2.URL:
"""Gets the current url."""
return self._old_url
@property
def latest_version(self) -> str:
"""Gets latest version."""
return self._new_ver
@property
def latest_url(self) -> metadata_pb2.URL:
"""Gets URL for latest version."""
return self._new_url
@property
def has_errors(self) -> bool:
"""Gets whether this update had an error."""
return self._has_errors
def use_current_as_latest(self):
"""Uses current version/url as the latest to refresh project."""
self._new_ver = self._old_ver
self._new_url = self._old_url
|
[
"[email protected]"
] | |
a85110d0091d407c2364cee12549f5de1adf8a07
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5744014401732608_0/Python/ArbokEkans/C.py
|
413f74cc89b84cfd4dc8d0ba77d001600a4d53ea
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 979 |
py
|
def solve():
b, m = [int(i) for i in input().split()]
if m > 2**(b-2):
return "IMPOSSIBLE"
else:
graph = construct(b)
rep = bin(m)[2:][::-1]
if m == 2**(b-2):
for key in graph:
if key != b-1:
graph[key].append(b-1)
else:
for i, digit in enumerate(rep):
if digit == "1":
graph[i+1].append(b-1)
res = ["POSSIBLE"]
for i in range(b):
row = []
for j in range(b):
if j in graph[i]:
row.append(1)
else:
row.append(0)
res.append(''.join(str(x) for x in row))
return '\n'.join(res)
def construct(b):
d = {i:list(range(i+1,b-1)) for i in range(b) }
return d
n_cases = int(input())
for n_case in range(n_cases):
print("Case #{}: {}".format(n_case+1, solve()))
|
[
"[email protected]"
] | |
36b479f0a4a7e4c24279afbf988d9396960305bd
|
81a9840c702927b4ca9ef17b766064f1d3c9139d
|
/mantabot/apps/moderation/handlers/readonly.py
|
212eafdce5f9d13f3499cb72f207fa73becc05d9
|
[
"MIT"
] |
permissive
|
spectras/mantabot
|
58b2d996ccd359c7720006b87ab94db1ac07956f
|
9b2de297d46224d66a84b8925e09cc209d8b37d4
|
refs/heads/master
| 2020-03-19T12:42:20.893443 | 2018-06-07T23:25:09 | 2018-06-07T23:25:09 | 136,534,522 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,065 |
py
|
import discord
from mantabot.apps.moderation import service
class ReadOnly(object):
""" Simple plugin that deletes messages sent to some channels """
name = 'moderation.readonly'
def __init__(self, client):
self.client = client
async def on_message(self, message):
channel = message.channel
if not isinstance(channel, discord.abc.GuildChannel):
return
if message.author.bot:
return
# Handle readonly
if await service.get_readonly(channel):
try:
await message.delete()
except discord.NotFound:
pass # this is okay, message is already deleted
except discord.Forbidden:
await service.set_readonly(channel, False, user=channel.guild.me, reason='forbidden')
# Handle mutes
if await service.get_channel_member_muted(channel, message.author):
try:
await message.delete()
except (discord.NotFound, discord.Forbidden):
pass
|
[
"[email protected]"
] | |
18426ac763d7a141d3556b448fb271532e0d54af
|
3c3095585c075002b707475b49bdd8d8c7d4b71d
|
/InvenTree/InvenTree/urls.py
|
d9600333f4698fcd539486876a45dfd4ae42af04
|
[
"MIT"
] |
permissive
|
andyseracuse/InvenTree
|
ffa7c0a2d131b363c0b93c2d888a9a89c0048bf7
|
c5166ec845ffe9477ab488931775dcdfd1dce7e7
|
refs/heads/master
| 2022-06-08T12:54:11.522718 | 2020-04-20T09:30:58 | 2020-04-20T09:30:58 | 258,296,796 | 0 | 0 |
MIT
| 2020-04-23T18:33:12 | 2020-04-23T18:33:11 | null |
UTF-8
|
Python
| false | false | 4,211 |
py
|
"""
Top-level URL lookup for InvenTree application.
Passes URL lookup downstream to each app as required.
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth import views as auth_views
from qr_code import urls as qr_code_urls
from company.urls import company_urls
from company.urls import supplier_part_urls
from company.urls import price_break_urls
from common.urls import common_urls
from part.urls import part_urls
from stock.urls import stock_urls
from build.urls import build_urls
from order.urls import order_urls
from common.api import common_api_urls
from part.api import part_api_urls, bom_api_urls
from company.api import company_api_urls
from stock.api import stock_api_urls
from build.api import build_api_urls
from order.api import po_api_urls
from django.conf import settings
from django.conf.urls.static import static
from django.views.generic.base import RedirectView
from rest_framework.documentation import include_docs_urls
from .views import IndexView, SearchView, DatabaseStatsView
from .views import SettingsView, EditUserView, SetPasswordView
from .api import InfoView, BarcodePluginView, ActionPluginView
from users.urls import user_urls
admin.site.site_header = "InvenTree Admin"
apipatterns = [
url(r'^common/', include(common_api_urls)),
url(r'^part/', include(part_api_urls)),
url(r'^bom/', include(bom_api_urls)),
url(r'^company/', include(company_api_urls)),
url(r'^stock/', include(stock_api_urls)),
url(r'^build/', include(build_api_urls)),
url(r'^po/', include(po_api_urls)),
# User URLs
url(r'^user/', include(user_urls)),
# Plugin endpoints
url(r'^barcode/', BarcodePluginView.as_view(), name='api-barcode-plugin'),
url(r'^action/', ActionPluginView.as_view(), name='api-action-plugin'),
# InvenTree information endpoint
url(r'^$', InfoView.as_view(), name='api-inventree-info'),
]
settings_urls = [
url(r'^user/?', SettingsView.as_view(template_name='InvenTree/settings/user.html'), name='settings-user'),
url(r'^currency/?', SettingsView.as_view(template_name='InvenTree/settings/currency.html'), name='settings-currency'),
url(r'^part/?', SettingsView.as_view(template_name='InvenTree/settings/part.html'), name='settings-part'),
url(r'^other/?', SettingsView.as_view(template_name='InvenTree/settings/other.html'), name='settings-other'),
# Catch any other urls
url(r'^.*$', SettingsView.as_view(template_name='InvenTree/settings/user.html'), name='settings'),
]
urlpatterns = [
url(r'^part/', include(part_urls)),
url(r'^supplier-part/', include(supplier_part_urls)),
url(r'^price-break/', include(price_break_urls)),
url(r'^common/', include(common_urls)),
url(r'^stock/', include(stock_urls)),
url(r'^company/', include(company_urls)),
url(r'^order/', include(order_urls)),
url(r'^build/', include(build_urls)),
url(r'^auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^login/', auth_views.LoginView.as_view(), name='login'),
url(r'^logout/', auth_views.LogoutView.as_view(template_name='registration/logout.html'), name='logout'),
url(r'^settings/', include(settings_urls)),
url(r'^edit-user/', EditUserView.as_view(), name='edit-user'),
url(r'^set-password/', SetPasswordView.as_view(), name='set-password'),
url(r'^admin/', admin.site.urls, name='inventree-admin'),
url(r'^qr_code/', include(qr_code_urls, namespace='qr_code')),
url(r'^index/', IndexView.as_view(), name='index'),
url(r'^search/', SearchView.as_view(), name='search'),
url(r'^stats/', DatabaseStatsView.as_view(), name='stats'),
url(r'^api/', include(apipatterns)),
url(r'^api-doc/', include_docs_urls(title='InvenTree API')),
url(r'^markdownx/', include('markdownx.urls')),
]
# Static file access
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# Media file access
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Send any unknown URLs to the parts page
urlpatterns += [url(r'^.*$', RedirectView.as_view(url='/index/', permanent=False), name='index')]
|
[
"[email protected]"
] | |
8fddcccf8a50a7b00db56be3a20a3f31347fac88
|
82d588161a8f8cd27c3031c779120ea4380791b9
|
/yejin/삼성 SW 역량 테스트 기출 문제/2021 하반기/13458.py
|
0e77a2dfcaf4b39f4e2dc3fcea152240ebab6c5f
|
[] |
no_license
|
Yejin6911/Algorithm_Study
|
3aa02a7d07169382a78c049d1de8251a52da816c
|
98c968bfeed17ab6b62e3a077280e0310f08190a
|
refs/heads/master
| 2023-09-01T00:31:07.212413 | 2021-10-24T07:56:21 | 2021-10-24T07:56:21 | 345,009,057 | 1 | 1 | null | 2021-09-20T13:08:33 | 2021-03-06T04:57:34 |
Python
|
UTF-8
|
Python
| false | false | 343 |
py
|
import sys
import math
input = sys.stdin.readline
n = int(input())
A = list(map(int, input().split()))
B, C = map(int, input().split())
total = n
# exclude the examinees already covered by the chief supervisor (B per room)
for i in range(n):
if A[i] <= B:
A[i] = 0
else:
A[i] -= B
    # count the assistant supervisors (C examinees each) needed for this room
total += math.ceil(A[i]/C)
print(total)
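# Editor's worked example (not from the problem statement): n=5, A=[1,2,3,4,5],
# B=2, C=2 -> remainders after the chief supervisor are [0,0,1,2,3], needing
# ceil-divisions 0+0+1+1+2 = 4 assistants, so total = 5 + 4 = 9.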
|
[
"[email protected]"
] | |
a0fb061548bfd69cb764cc4823ae29227aa804a6
|
0e8ab63a60fd03b1778aa392c0b11fedd88409e4
|
/ingest/ingest/manager.py
|
8ed7b3d707ce64b45eb7b82fa5323c3a84a15a39
|
[] |
no_license
|
Kyeongrok/dms
|
babeb19115355c3d930c94c89ca55d3e5de2dc55
|
a67c446f0ffd3f9a1812de961ef915c405a4096f
|
refs/heads/master
| 2021-06-23T22:44:18.881538 | 2019-09-26T03:42:13 | 2019-09-26T03:42:13 | 210,993,619 | 0 | 0 | null | 2021-03-25T22:57:32 | 2019-09-26T03:41:24 |
Python
|
UTF-8
|
Python
| false | false | 2,747 |
py
|
import abc
import logging
import os
from dmsclient.client import DMSClient
from dmsclient.exceptions import DMSClientException
from ingest import util
from ingest.logger import ElasticsearchHandler, JournalFormatter
class AbstractIngestManager(abc.ABC):
def __init__(self, config, mount_path, reader_id, cartridge_id):
self.log = logging.getLogger('ingest.manager')
self.config = config
self.thread_count = config['general']['threads']
self.check_mountpoints = config['general']['check_mountpoints']
self.ignore_directories = config['general']['ignore_directories']
self.log_to_es = config['general']['log_to_es']
self.mount_path = mount_path
self.reader_id = reader_id
self.cartridge_id = cartridge_id
self.client = DMSClient(es_endpoint=config['elasticsearch']['endpoint'],
es_user=config['elasticsearch']['user'],
es_password=config['elasticsearch']['password'],
create_templates=config['elasticsearch']['create_templates'],
verify_templates=config['elasticsearch']['verify_templates'])
if self.log_to_es:
handler = ElasticsearchHandler(self.client)
formatter = JournalFormatter()
handler.setFormatter(formatter)
root_logger = logging.getLogger('ingest')
root_logger.addHandler(handler)
if not self.mount_path.startswith('rsync://'):
try:
self.mount_path = os.path.abspath(self.mount_path)
self.__check_path(self.mount_path, readwrite=False)
except Exception as e:
self.log.error('Error checking the input path. {}'.format(str(e),))
raise e
def update_reader(self, message):
if self.reader_id:
self.client.readers.set_message(self.reader_id, message)
def set_cartridge_workflow_type(self, cartridge_id, workflow_type):
if self.cartridge_id:
self.client.cartridges.set_workflow_type(self.cartridge_id, workflow_type)
@abc.abstractmethod
def run(self):
pass
def __check_path(self, path, readwrite=False):
if path.startswith('rsync://'):
return
if readwrite:
self.log.info("Checking write permissions on path '%s'" % (path,))
if not util.isWritable(path):
raise Exception('Cannot write to directory: %s' % (path,))
else:
self.log.info("Checking read permissions on path '%s'" % (path,))
if not util.isReadable(path):
raise Exception('Cannot read from directory: %s' % (path,))
|
[
"[email protected]"
] | |
1d3aa6d35106c3460d100c2156236cc0871312ec
|
fc5becca3e2e48a444b512e059df1cd21601829b
|
/Aulas/Aula19A.py
|
4d8089077e3bdd14ae5f3b3b6ced29a4100d4556
|
[
"MIT"
] |
permissive
|
Felix-xilef/Curso-de-Python
|
c44bf8c22b393aefaed3a2bb3127ef7999e27fb8
|
cdff7c7f3850e6326e274c8c1987b9e1a18ce910
|
refs/heads/master
| 2021-05-19T11:09:22.644638 | 2020-04-01T22:09:02 | 2020-04-01T22:09:02 | 251,665,966 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 482 |
py
|
# Dictionaries {} / dict() - like a list, but the index (the key) can be chosen freely
pessoas = {'nome': 'Felix', 'sexo': 'm', 'idade': 18}
print(pessoas)
print(pessoas['nome'])
print(pessoas['idade'])
print(pessoas.values())
print(pessoas.keys())
print(pessoas.items())
for k, v in pessoas.items():
print(k, '=', v)
del pessoas['sexo']
print(pessoas)
pessoas['nome'] = 'Gustavo'
print(pessoas)
pessoas['peso'] = 74
print(pessoas)
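# Extra illustrative example (editor's addition): get() returns a default value
# instead of raising KeyError when a key is missing
print(pessoas.get('altura', 'not provided'))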
input('\n\nPress <enter> to continue')
|
[
"[email protected]"
] | |
c4b90c1495df475c554108312c8e2a94b88ee10d
|
ef66e297a49d04098d98a711ca3fda7b8a9a657c
|
/Python/display.py
|
1b280e0ad29c46c1e08530191b08e20ef0df52eb
|
[] |
no_license
|
breezy1812/MyCodes
|
34940357954dad35ddcf39aa6c9bc9e5cd1748eb
|
9e3d117d17025b3b587c5a80638cb8b3de754195
|
refs/heads/master
| 2020-07-19T13:36:05.270908 | 2018-12-15T08:54:30 | 2018-12-15T08:54:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,897 |
py
|
# coding: UTF-8
__metaclass__ = type
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
import random
import socket
i = 0
winId = 0
s = None
poses = []
SIZE = [800, 600]
clear = True
def Draw():
global poses
global clear
glEnable(GL_POINT_SMOOTH)
if clear:
glClear(GL_COLOR_BUFFER_BIT)
clear = False
glPointSize(5)
glBegin(GL_POINTS)
for item in poses:
try:
if item[2] == 1:
glVertex2f(item[0], item[1])
elif item[2] == -1:
clear = True
except:
pass
poses = []
glEnd()
glFlush()
def Update():
global s
global poses
try:
data = s.recv(4096).split('|')
poses = map(lambda x: map(lambda y: int(y), x.split(',')), data)
if not data:
raise Exception
except Exception, e:
print e
s.close()
sys.exit(0)
for item in poses:
item[0] = (item[0]*1.0/SIZE[0]*200-100)/100.0
item[1] = -((item[1]*1.0/SIZE[1]*200-100))/100.0
print poses
glutPostRedisplay()
def keyboardHit(key, mouseX, mouseY):
if key == 'q':
global s
glutDestroyWindow(winId)
s.close()
sys.exit()
def mouseHit(button, state, mouseX, mouseY):
pass
def mouseMotion(mouseX, mouseY):
pass
def main():
global winId
global s
s = socket.socket()
host = socket.gethostname()
s.connect((host, 1234))
glutInit()
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGBA)
glutInitWindowSize(SIZE[0], SIZE[1])
winId = glutCreateWindow("David")
glutDisplayFunc(Draw)
glutIdleFunc(Update)
glutKeyboardFunc(keyboardHit)
glutMouseFunc(mouseHit)
glutMotionFunc(mouseMotion)
glutMainLoop()
if __name__ == '__main__':
try:
main()
except Exception, e:
print e
|
[
"[email protected]"
] | |
e57f6351bc13444d18ec9ae6b667d6e3d4b37ed4
|
a7e75fcd05aa8ebf2066c4eb0a05496042dd5ded
|
/better_work_data/better_work_data/items.py
|
ab7aeb32e62a563ca44dce609a18c2de91fd0b79
|
[
"MIT"
] |
permissive
|
JackDan9/miniProgram
|
d6fe14fced0f9a154d01a6f950ab26325ed445de
|
d60a33275334b4caa3c15d5c6196938fb800505b
|
refs/heads/master
| 2023-02-10T13:26:23.453536 | 2023-01-09T03:41:43 | 2023-01-09T03:41:43 | 132,235,452 | 1 | 0 |
MIT
| 2023-02-08T00:42:41 | 2018-05-05T09:55:32 |
JavaScript
|
UTF-8
|
Python
| false | false | 515 |
py
|
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
class BetterWorkDataItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
order = scrapy.Field()
title = scrapy.Field()
summary = scrapy.Field()
source_type = scrapy.Field()
source_name = scrapy.Field()
publish_on = scrapy.Field()
created_on = scrapy.Field()
updated_on = scrapy.Field()
pass
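# Editor's sketch (hypothetical selectors, not from this project): a spider would
# populate the item roughly like this before yielding it to the pipelines:
#   item = BetterWorkDataItem()
#   item['title'] = response.css('h1::text').get()
#   item['summary'] = response.css('p.summary::text').get()
#   yield item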
|
[
"[email protected]"
] | |
dc52624b745e24996b18b3d581240cefcbe6d403
|
04934bc61ceab01ad24ab1672461a1b103884e91
|
/ssh/sshconn_002.py
|
da12bca4af6f78377d324cb95c618bfe4ad0dab7
|
[] |
no_license
|
aiedonline/aulapentest
|
05f31d0410493f02361fe778ab02d584aa84ef5e
|
1dd28feb95941f49205af836c9013283b4cb6b99
|
refs/heads/main
| 2023-08-18T10:22:19.596876 | 2021-09-26T20:14:50 | 2021-09-26T20:14:50 | 402,219,644 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,257 |
py
|
#!/usr/bin/python
import sys;
from netmiko import Netmiko
ip = "11.11.11.171";
print("\033[1;33m[*] - SSH Bruteforce Attack", " \033[0;0m");
print("\033[1;33m[*] - SSH target", ip, " \033[0;0m");
with open("user.txt") as users:
users = users.readlines();
for user in users:
passwords_testados = [];
with open("password.txt") as passwords:
passwords = passwords.readlines();
            passwords.insert(0, user); # the most commonly used password is the username itself
for password in passwords:
try:
if password in passwords_testados:
continue;
sshconn = Netmiko(ip, username= user.strip(), password=password.strip(), device_type="linux");
sshconn.disconnect();
print("\033[1;32m[+] SUCES PARA", user.strip(), password.strip(), " \033[0;0m");
except KeyboardInterrupt:
                    print('User requested to quit.');
sys.exit(0);
except:
print("\033[1;31m[-] FALHA PARA", user.strip(), password.strip(), " \033[0;0m");
finally:
passwords_testados.insert(0, password);
|
[
"[email protected]"
] | |
6fdea119f9c9239b63eda3db6b7c2b1d0233e66d
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02585/s776126936.py
|
78cc9d2cac5fd2a3bfb611ed540139e54d721039
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,175 |
py
|
import sys
sys.setrecursionlimit(10**7)
readline = sys.stdin.buffer.readline
def readstr():return readline().rstrip().decode()
def readstrs():return list(readline().decode().split())
def readint():return int(readline())
def readints():return list(map(int,readline().split()))
def printrows(x):print('\n'.join(map(str,x)))
def printline(x):print(' '.join(map(str,x)))
def check(cir,num):
m = len(cir)
a = sum(cir)
if num == 0:
ss = 0
elif num == 1:
ss = max(cir)
else:
ac = list(accumulate([0]+cir))
l = 0
r = 1
ss = ac[r]-ac[l]
i = 0
while 1:
if r == m:
l = ac[l+1:r].index(min(ac[l+1:r])) + l+1
ss = max(ss,ac[r]-ac[l])
break
elif i%2==0:
r = ac[r+1:l+num+1].index(max(ac[r+1:l+num+1])) + r+1
else:
l = ac[l+1:r].index(min(ac[l+1:r])) + l+1
i+=1
ss = max(ss,ac[r]-ac[l])
num = m-num
l = 0
r = num
i = 0
ss = max(ss,a-ac[r]+ac[l])
while 1:
if r == m:
l = ac[l+1:r-num+1].index(max(ac[l+1:r-num+1])) + l+1
ss = max(ss,a-ac[r]+ac[l])
break
elif i%2==0:
r = ac[r+1:l+m].index(min(ac[r+1:l+m])) + r+1
else:
l = ac[l+1:r-num+1].index(max(ac[l+1:r-num+1])) + l+1
i+=1
ss = max(ss,a-ac[r]+ac[l])
return ss
from itertools import accumulate
n,k = readints()
p = [x-1 for x in readints()]
c = readints()
circles = []
used = [0]*n
for i in range(n):
if not used[i]:
circles.append([c[i]])
used[i] = 1
j = p[i]
while not used[j]:
circles[-1].append(c[j])
used[j] = 1
j = p[j]
score = -10**20
for cir in circles:
m = len(cir)
a = sum(cir)
if k>m:
if a>0:
score = max(score, (k//m)*a + check(cir,k%m), (k//m-1)*a + check(cir,m))
else:
score = max(score,check(cir,m))
else:
score = max(score,check(cir,k))
print(score)
|
[
"[email protected]"
] | |
e99b1b904a183481565ed38808f38f03702f4e60
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2739/60825/244336.py
|
391935b7e27570792c33d23a3858845f5b95b823
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 440 |
py
|
res = []
def printAns(currList, currSum, target, k):
    # Collect every combination of k distinct digits 1-9 that sums to target.
    if currSum == target and len(currList) == k:
        res.append(currList)
        return
    elif currSum > target or len(currList) > k:
        return
    else:
        # Continue with digits larger than the last one chosen (1 if none yet),
        # so combinations stay strictly increasing and no digit is reused.
        start = currList[-1] + 1 if currList else 1
        for i in range(start, 10):
            t = currList[:]
            t.append(i)
            printAns(t, currSum + i, target, k)
s=input()
k=int(s[0])
target=int(s[3:])
printAns([], 0, target, k)
print(res)
|
[
"[email protected]"
] | |
3e241bca87c1106e07b8d5ffd8e53da25cae808a
|
8e1141fb8d9bf02d7e1c2fb887d66049d0860714
|
/InvenTree/build/models.py
|
d09e7518785858212cb1d0f2ae5b953b0b916930
|
[
"MIT"
] |
permissive
|
ksanchezcld/InvenTree
|
73ec392db5149814604e79690b465ae900af0c94
|
ceea0533686305077c07c78ffa20ab4227ce2cf4
|
refs/heads/master
| 2023-02-28T10:07:02.741814 | 2018-05-12T02:44:29 | 2018-05-12T02:44:29 | 165,738,059 | 1 | 0 |
MIT
| 2023-02-11T19:31:42 | 2019-01-14T21:28:53 |
JavaScript
|
UTF-8
|
Python
| false | false | 3,216 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
from django.db import models
from django.core.validators import MinValueValidator
class Build(models.Model):
""" A Build object organises the creation of new parts from the component parts
It uses the part BOM to generate new parts.
Parts are then taken from stock
"""
def get_absolute_url(self):
return '/build/{pk}/'.format(pk=self.id)
# Build status codes
PENDING = 10 # Build is pending / active
HOLDING = 20 # Build is currently being held
CANCELLED = 30 # Build was cancelled
COMPLETE = 40 # Build is complete
BUILD_STATUS_CODES = {PENDING: _("Pending"),
HOLDING: _("Holding"),
CANCELLED: _("Cancelled"),
COMPLETE: _("Complete"),
}
batch = models.CharField(max_length=100, blank=True, null=True,
help_text='Batch code for this build output')
# Status of the build
status = models.PositiveIntegerField(default=PENDING,
choices=BUILD_STATUS_CODES.items(),
validators=[MinValueValidator(0)])
# Date the build model was 'created'
creation_date = models.DateField(auto_now=True, editable=False)
# Date the build was 'completed'
completion_date = models.DateField(null=True, blank=True)
# Brief build title
title = models.CharField(max_length=100, help_text='Brief description of the build')
# A reference to the part being built
# Only 'buildable' parts can be selected
part = models.ForeignKey('part.Part', on_delete=models.CASCADE,
related_name='builds',
limit_choices_to={'buildable': True},
)
# How many parts to build?
quantity = models.PositiveIntegerField(default=1,
validators=[MinValueValidator(1)],
help_text='Number of parts to build')
# Notes can be attached to each build output
notes = models.TextField(blank=True)
@property
def required_parts(self):
parts = []
for item in self.part.bom_items.all():
part = {'part': item.sub_part,
'per_build': item.quantity,
'quantity': item.quantity * self.quantity
}
parts.append(part)
return parts
@property
def can_build(self):
""" Return true if there are enough parts to supply build
"""
for item in self.required_parts:
if item['part'].total_stock < item['quantity']:
return False
return True
@property
def is_active(self):
""" Is this build active?
An active build is either:
- Pending
- Holding
"""
return self.status in [
self.PENDING,
self.HOLDING
]
@property
def is_complete(self):
return self.status == self.COMPLETE
|
[
"[email protected]"
] | |
3b81da56caa93e61d28fabd2fb15cbe2d6049842
|
af6feb644d2435e1d656556261e5e100209beb1c
|
/helper/show_pred.py
|
3e501e41b0d01880007c112e02a8e8be86dcecf8
|
[
"MIT"
] |
permissive
|
liusida/TorchServe_FaceLandmark_Example
|
f2ca5d1e9cde2eed340ce46584a06cb0e16ef4ac
|
1e854f2f82874255b59ca27b19d3a3254fe69636
|
refs/heads/main
| 2023-04-26T16:25:18.421724 | 2021-05-26T03:25:00 | 2021-05-26T03:25:00 | 370,864,633 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,081 |
py
|
import collections
import matplotlib.pyplot as plt
def show_pred(preds, input_img, only_2d=True, filename=None):
# 2D-Plot
plot_style = dict(marker='o',
markersize=4,
linestyle='-',
lw=2)
pred_type = collections.namedtuple('prediction_type', ['slice', 'color'])
pred_types = {'face': pred_type(slice(0, 17), (0.682, 0.780, 0.909, 0.5)),
'eyebrow1': pred_type(slice(17, 22), (1.0, 0.498, 0.055, 0.4)),
'eyebrow2': pred_type(slice(22, 27), (1.0, 0.498, 0.055, 0.4)),
'nose': pred_type(slice(27, 31), (0.345, 0.239, 0.443, 0.4)),
'nostril': pred_type(slice(31, 36), (0.345, 0.239, 0.443, 0.4)),
'eye1': pred_type(slice(36, 42), (0.596, 0.875, 0.541, 0.3)),
'eye2': pred_type(slice(42, 48), (0.596, 0.875, 0.541, 0.3)),
'lips': pred_type(slice(48, 60), (0.596, 0.875, 0.541, 0.3)),
'teeth': pred_type(slice(60, 68), (0.596, 0.875, 0.541, 0.4))
}
fig = plt.figure(figsize=plt.figaspect(.5))
ax = fig.add_subplot(1, 1 if only_2d else 2, 1)
ax.imshow(input_img)
for pred_type in pred_types.values():
ax.plot(preds[pred_type.slice, 0],
preds[pred_type.slice, 1],
color=pred_type.color, **plot_style)
ax.axis('off')
if not only_2d:
# 3D-Plot
ax = fig.add_subplot(1, 2, 2, projection='3d')
surf = ax.scatter(preds[:, 0] * 1.2,
preds[:, 1],
preds[:, 2],
c='cyan',
alpha=1.0,
edgecolor='b')
for pred_type in pred_types.values():
ax.plot3D(preds[pred_type.slice, 0] * 1.2,
preds[pred_type.slice, 1],
preds[pred_type.slice, 2], color='blue')
ax.view_init(elev=90., azim=90.)
ax.set_xlim(ax.get_xlim()[::-1])
if filename:
plt.savefig(filename)
else:
plt.show()
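# Hypothetical usage sketch (editor's addition): `preds` would be a (68, 2) or
# (68, 3) landmark array (e.g. from the face_alignment package) and `input_img`
# the matching RGB image:
#   show_pred(preds, input_img, only_2d=True, filename="landmarks.png")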
|
[
"[email protected]"
] | |
b57127734749739690a92ea4af6da4fa3a1d9bd5
|
50948d4cb10dcb1cc9bc0355918478fb2841322a
|
/azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/sku_py3.py
|
8bb382d6481045d2cc41fe140e170b08d4bbffa6
|
[
"MIT"
] |
permissive
|
xiafu-msft/azure-sdk-for-python
|
de9cd680b39962702b629a8e94726bb4ab261594
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
refs/heads/master
| 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 |
MIT
| 2020-10-02T01:17:02 | 2019-05-22T07:33:46 |
Python
|
UTF-8
|
Python
| false | false | 3,319 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Sku(Model):
"""The SKU of the storage account.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param name: Required. Gets or sets the sku name. Required for account
creation; optional for update. Note that in older versions, sku name was
called accountType. Possible values include: 'Standard_LRS',
'Standard_GRS', 'Standard_RAGRS', 'Standard_ZRS', 'Premium_LRS'
:type name: str or ~azure.mgmt.storage.v2017_06_01.models.SkuName
:ivar tier: Gets the sku tier. This is based on the SKU name. Possible
values include: 'Standard', 'Premium'
:vartype tier: str or ~azure.mgmt.storage.v2017_06_01.models.SkuTier
:ivar resource_type: The type of the resource, usually it is
'storageAccounts'.
:vartype resource_type: str
:ivar kind: Indicates the type of storage account. Possible values
include: 'Storage', 'BlobStorage'
:vartype kind: str or ~azure.mgmt.storage.v2017_06_01.models.Kind
:ivar locations: The set of locations that the SKU is available. This will
be supported and registered Azure Geo Regions (e.g. West US, East US,
Southeast Asia, etc.).
:vartype locations: list[str]
:ivar capabilities: The capability information in the specified sku,
including file encryption, network acls, change notification, etc.
:vartype capabilities:
list[~azure.mgmt.storage.v2017_06_01.models.SKUCapability]
:param restrictions: The restrictions because of which SKU cannot be used.
This is empty if there are no restrictions.
:type restrictions:
list[~azure.mgmt.storage.v2017_06_01.models.Restriction]
"""
_validation = {
'name': {'required': True},
'tier': {'readonly': True},
'resource_type': {'readonly': True},
'kind': {'readonly': True},
'locations': {'readonly': True},
'capabilities': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'SkuName'},
'tier': {'key': 'tier', 'type': 'SkuTier'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'Kind'},
'locations': {'key': 'locations', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'},
'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
}
def __init__(self, *, name, restrictions=None, **kwargs) -> None:
super(Sku, self).__init__(**kwargs)
self.name = name
self.tier = None
self.resource_type = None
self.kind = None
self.locations = None
self.capabilities = None
self.restrictions = restrictions
|
[
"[email protected]"
] | |
daa712434a43b1506008cb02c38d0182d39483c2
|
b9c55de2b21ca781ab5522da8a1db34ed55bd644
|
/django-app/member/urls.py
|
cee47f79b216288c4dad0072c53238b1df7520be
|
[] |
no_license
|
JeongEuiJin/model-wed-p
|
04a8ed2aa8145a860e214c563fcebae9d7e39692
|
9fb987f5fe65c05825c519d6ef4bd4d802e0dccb
|
refs/heads/master
| 2021-01-21T21:05:07.169721 | 2017-06-19T12:11:51 | 2017-06-19T12:11:51 | 94,772,729 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 198 |
py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.student_list, name='student_list'),
url(r'^(?P<s_pk>\d+)/$', views.student_del, name='student_del')
]
|
[
"[email protected]"
] | |
e789385cd300ec837af0be820f02f87154600e1d
|
8a4a4cab76ddf1b19a017c3e5c765caf9a5fe3cc
|
/test/test_remote_app_permission_update_user.py
|
45ffee2297dddce630ab3389a4f5adc204e15ffc
|
[] |
no_license
|
ibuler/testsdk
|
fa724ff129e2a6144c05b8330cd4014c8bfb9a58
|
015bc6ca7da64180a2a11756a4e7cce733aca806
|
refs/heads/master
| 2020-06-23T09:02:50.322517 | 2019-07-25T05:51:26 | 2019-07-25T05:51:26 | 198,577,933 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,020 |
py
|
# coding: utf-8
"""
Jumpserver API Docs
Jumpserver Restful api docs # noqa: E501
OpenAPI spec version: v1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.remote_app_permission_update_user import RemoteAppPermissionUpdateUser # noqa: E501
from swagger_client.rest import ApiException
class TestRemoteAppPermissionUpdateUser(unittest.TestCase):
"""RemoteAppPermissionUpdateUser unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testRemoteAppPermissionUpdateUser(self):
"""Test RemoteAppPermissionUpdateUser"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.remote_app_permission_update_user.RemoteAppPermissionUpdateUser() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
2fafa7209aecd8c1e4b79ff259093980bd081564
|
b9a73fe404ee30baf8a88276030363ad2d1d4cc5
|
/old/BRNN_Gap/eval.py
|
d8cfef3415e874b7220fbd604a5df6822553a2ff
|
[] |
no_license
|
everglowing/Language-Models
|
06da6befceef9b4fd1f43ba7d6708fcf8862f715
|
67db3fc5d0b0ef099cac306bd78294764d3587cf
|
refs/heads/master
| 2021-01-13T04:12:41.341299 | 2016-12-27T18:53:24 | 2016-12-27T18:53:24 | 77,684,222 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,473 |
py
|
from __future__ import print_function
import numpy as np
import tensorflow as tf
import argparse
import codecs
import time
import os
from six.moves import cPickle
from utils import TextLoader
from model import Model
from six import text_type
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--save_dir', type=str, default='save',
help='model directory to store checkpointed models')
parser.add_argument('--text', type=str,
help='filename of text to evaluate on')
args = parser.parse_args()
eval(args)
def eval(args):
with open(os.path.join(args.save_dir, 'config.pkl'), 'rb') as f:
saved_args = cPickle.load(f)
saved_args.batch_size = 1
saved_args.seq_length = 200
with open(os.path.join(args.save_dir, 'chars_vocab.pkl'), 'rb') as f:
chars, vocab = cPickle.load(f)
model = Model(saved_args, infer=False, evaluation=True)
with codecs.open(args.text, 'r', encoding='utf-8') as f:
text = f.read()
with tf.Session() as sess:
tf.initialize_all_variables().run()
saver = tf.train.Saver(tf.all_variables())
ckpt = tf.train.get_checkpoint_state(args.save_dir)
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
ppl = model.eval(sess, chars, vocab, text)
print('perplexity: {0}'.format(ppl))
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
cedcb016825d6154f200e7ccfe1d53847c674bb6
|
9512f329d9326ed0b9436202947de9eee0c6c387
|
/Cap07-estruturas_de_repeticao/for_03.py
|
dcfe65ac3989ce9551d9ca9f6b3a0ccdf9f82056
|
[] |
no_license
|
frclasso/CodeGurus_Python_mod1-turma1_2019
|
9fffd76547256ac480db41536223682a5b152944
|
e34d60498ee45566dbf1182551d91250a9aab272
|
refs/heads/master
| 2020-04-30T02:01:50.757611 | 2019-06-10T15:38:58 | 2019-06-10T15:38:58 | 176,546,912 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 687 |
py
|
# nested loops
lot_2D = [
['Toyota', 'Audi', 'BMW'],
['Lexus', 'Jeep'],
['Honda', 'Kia', 'Mazda']
]
#print(lot_2D[0][0]) # list 0, element 0 of list 0
# for linha in lot_2D:
# for carro in linha:
# print(carro)
lot_3D =[
[
['Tesla', 'Fiat', 'BMW'],
['Honda', 'Jeep'],
['Saab','Kia', 'Ford']
],
[
['Subaru', 'Nissan'],
['Volkswagen'],
['Mercedez']
],
[
['Chevrolet', 'GMC'],
['Ferrari', 'Lamborghini']
]
]
#print(lot_3D[0])
# print(lot_3D[0][0])
#print(lot_3D[0][0][1])
for grupo in lot_3D:
for line in grupo:
for carro in line:
print(carro)
|
[
"[email protected]"
] | |
5ea3abe3100127da5d59957fa8e7d512baa17b7f
|
9a7b7f90aa62ce52643e2df83d8aef7ba7803afd
|
/src/input_handlers/inventory_drop_handler.py
|
f467da4003f1c32e9ad309fed15c3b0a08d53594
|
[] |
no_license
|
voidnologo/tcod_roguelike
|
d82a060a94784a18156fefe105a3e26a540525e9
|
23d806f960134c17ccbd4e6ca5527f35e654df65
|
refs/heads/main
| 2023-02-28T11:05:52.809161 | 2021-02-07T16:36:38 | 2021-02-07T16:36:38 | 331,168,875 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 336 |
py
|
import actions
from input_handlers.inventory_event_handler import InventoryEventHandler
class InventoryDropHandler(InventoryEventHandler):
"""
Handle dropping an inventory item
"""
TITLE = 'Select an item to drop'
def on_item_selected(self, item):
return actions.DropItemAction(self.engine.player, item)
|
[
"(none)"
] |
(none)
|
e5fc3b96e27fd580d60715aa2a9faae4dfa0113f
|
7f7ba9fa96eb7741e3a7956aae439802376099d1
|
/excel_sync/db/models/mixins.py
|
27dc97215b789f0bf7f1c8a652037d36b68db776
|
[
"BSD-3-Clause"
] |
permissive
|
FriedrichK/django-excel-sync
|
bd34911960fab6580985378da7427f7823163bf7
|
3e649231dcdd26b29278dc2e9563ad0ab67d9f1c
|
refs/heads/master
| 2021-01-02T14:46:34.420031 | 2014-04-21T09:59:47 | 2014-04-21T09:59:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,270 |
py
|
from excel_sync.db.models.fields import SpreadsheetMixin
class SpreadsheetModelMixin:
@staticmethod
def import_spreadsheet_data(klass):
source = get_spreadsheet_datasource(klass)
fields_with_spreadsheet_metadata = get_fields_with_spreadsheet_metadata(klass)
field_settings = build_field_settings(fields_with_spreadsheet_metadata)
data_for_rows = source.get_rows(field_settings)
for data_for_row in data_for_rows:
entry = klass(**data_for_row)
entry.save()
def get_spreadsheet_datasource(klass):
return klass._meta.spreadsheet_source
def get_fields_with_spreadsheet_metadata(klass):
all_fields = klass._meta.fields
fields_with_spreadsheet_metadata = []
for field in all_fields:
if(has_spreadsheet_metadata(field)):
fields_with_spreadsheet_metadata.append(field)
return fields_with_spreadsheet_metadata
def has_spreadsheet_metadata(field):
return isinstance(field, SpreadsheetMixin)
def build_field_settings(fields_with_spreadsheet_metadata):
field_settings = []
for field in fields_with_spreadsheet_metadata:
field_setting = field.get_spreadsheet_settings()
field_settings.append(field_setting)
return field_settings
|
[
"[email protected]"
] | |
ff93f4c4e03c2723185097e82af8b56b2598c151
|
16be53c2dc4eee5602d3f7a38c599917009fb802
|
/account/migrations/0002_remove_profile_address.py
|
02e0e499ef82da7cbf16355972f4610131d12c84
|
[] |
no_license
|
surajit003/mubango
|
35f37fb992782ae168a407922b494c3be0605e00
|
603e13cd07417d200330ca7292d9032af568a0b9
|
refs/heads/main
| 2023-03-28T07:59:57.967026 | 2021-03-27T09:34:46 | 2021-03-27T09:34:46 | 315,040,701 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 319 |
py
|
# Generated by Django 3.0.4 on 2021-01-12 21:06
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("account", "0001_initial"),
]
operations = [
migrations.RemoveField(
model_name="profile",
name="address",
),
]
|
[
"[email protected]"
] | |
2398d673bdef1187105868a48ac3e87d4555d20f
|
3c01d7928029e74a19d646f5a40b3bf099b281a7
|
/typeshed/stdlib/mimetypes.pyi
|
5a3ec91acbcdb71fb39f59656555c7233be0d66e
|
[
"MIT"
] |
permissive
|
arpancodes/protectsql
|
f3ced238c103fca72615902a9cb719c44ee2b5ba
|
6392bb7a86d1f62b86faf98943a302f7ea3fce4c
|
refs/heads/main
| 2023-08-07T16:33:57.496144 | 2021-09-24T19:44:51 | 2021-09-24T19:44:51 | 409,894,807 | 0 | 1 |
MIT
| 2021-09-24T19:44:52 | 2021-09-24T08:46:02 |
Python
|
UTF-8
|
Python
| false | false | 1,554 |
pyi
|
import sys
from _typeshed import StrPath
from typing import IO, Sequence, Tuple
if sys.version_info >= (3, 8):
def guess_type(url: StrPath, strict: bool = ...) -> Tuple[str | None, str | None]: ...
else:
def guess_type(url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ...
def guess_all_extensions(type: str, strict: bool = ...) -> list[str]: ...
def guess_extension(type: str, strict: bool = ...) -> str | None: ...
def init(files: Sequence[str] | None = ...) -> None: ...
def read_mime_types(file: str) -> dict[str, str] | None: ...
def add_type(type: str, ext: str, strict: bool = ...) -> None: ...
inited: bool
knownfiles: list[str]
suffix_map: dict[str, str]
encodings_map: dict[str, str]
types_map: dict[str, str]
common_types: dict[str, str]
class MimeTypes:
suffix_map: dict[str, str]
encodings_map: dict[str, str]
types_map: Tuple[dict[str, str], dict[str, str]]
types_map_inv: Tuple[dict[str, str], dict[str, str]]
def __init__(self, filenames: Tuple[str, ...] = ..., strict: bool = ...) -> None: ...
def guess_extension(self, type: str, strict: bool = ...) -> str | None: ...
def guess_type(self, url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ...
def guess_all_extensions(self, type: str, strict: bool = ...) -> list[str]: ...
def read(self, filename: str, strict: bool = ...) -> None: ...
def readfp(self, fp: IO[str], strict: bool = ...) -> None: ...
if sys.platform == "win32":
def read_windows_registry(self, strict: bool = ...) -> None: ...
|
[
"[email protected]"
] | |
b4891a5b540f1bd8e420aa57dab3d7ec38f825b6
|
232d0a99df3ad03ce7811b4e96ebb6982cc0f865
|
/aggregate.py
|
396c21a0343add8ef7e949e48692f0f5f77f2ad8
|
[] |
no_license
|
tomalrussell/aggregation-case-study
|
9e6db17e3a43f436dfce8cb84e608cb61b607239
|
d23217d94e61d89043baffcfd157883fc474b8ae
|
refs/heads/master
| 2020-06-23T03:46:44.340892 | 2016-11-24T11:23:43 | 2016-11-24T11:23:43 | 74,666,760 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,478 |
py
|
from __future__ import print_function
import argparse
from collections import namedtuple
import fiona
from shapely.geometry import shape
from rtree import index
def proportion_of_a_intersecting_b(a, b):
intersection = a.intersection(b)
return intersection.area / a.area
ShapeWithValue = namedtuple('ShapeWithValue', ['shape', 'value'])
def aggregate(input_file, output_file, reporting_geometry, reporting_initial_value, reporting_attribute, reporting_attribute_type):
input_features = []
idx = index.Index()
with fiona.drivers():
with fiona.open(input_file) as input_src:
for feature in input_src:
s = ShapeWithValue(
shape=shape(feature['geometry']),
value=feature['properties'][reporting_attribute]
)
input_features.append(s)
# Populate R-tree index with bounds of input features
for pos, feature in enumerate(input_features):
idx.insert(pos, feature.shape.bounds)
with fiona.open(reporting_geometry) as reporting_src:
sink_schema = reporting_src.schema.copy()
sink_schema['properties'][reporting_attribute] = reporting_attribute_type
with fiona.open(
output_file, 'w',
crs=reporting_src.crs,
driver="ESRI Shapefile",
schema=sink_schema) as reporting_sink:
for reporting_feature in reporting_src:
reporting_shape = shape(reporting_feature['geometry'])
reporting_value = reporting_initial_value
# look up bbox intersecting features in R-tree
intersecting_features = [input_features[pos] for pos in idx.intersection(reporting_shape.bounds)]
for input_feature in intersecting_features:
# find proportion of input feature that intersects
proportion = proportion_of_a_intersecting_b(input_feature.shape, reporting_shape)
# add that proportion of the attribute_to_report to the reporting_value
reporting_value = reporting_value + proportion * input_feature.value
print(reporting_value)
reporting_feature['properties'][reporting_attribute] = reporting_value
reporting_sink.write(reporting_feature)
def setup_parser():
"""Parse command line arguments
"""
parser = argparse.ArgumentParser(description='Aggregate a value from one geometry to another.')
parser.add_argument('-i', '--input-file',
required=True,
help='Path to the input file, containing the data to be aggregated.')
parser.add_argument('-o', '--output-file',
required=True,
help='Path to the output file.')
parser.add_argument('-rg', '--reporting-geometry',
required=True,
help='Path to the reporting geometry file, containing geometry to be used as output.')
parser.add_argument('-ri', '--reporting-initial-value',
required=True,
help='Initial value for the attribute to output (used if no geometries intersect)')
parser.add_argument('-ra', '--reporting-attribute',
required=True,
help='Attribute name')
parser.add_argument('-rt', '--reporting-attribute-type',
required=True,
choices=['int', 'str', 'float'],
help='Type of value (can be "int", "str" or "float")')
parsed_args = parser.parse_args()
if parsed_args.reporting_attribute_type == 'int':
parsed_args.reporting_initial_value = int(parsed_args.reporting_initial_value)
if parsed_args.reporting_attribute_type == 'str':
parsed_args.reporting_initial_value = str(parsed_args.reporting_initial_value)
if parsed_args.reporting_attribute_type == 'float':
parsed_args.reporting_initial_value = float(parsed_args.reporting_initial_value)
return parsed_args
if __name__ == '__main__':
args = setup_parser()
"""Example usage:
python aggregate.py \
-i data/oa/england_oa_2011_clipped_with_pop.shp \
-o data/grid_with_pop.shp \
-rg data/grid.shp \
-ri 0 -ra pop -rt int
"""
aggregate(
args.input_file,
args.output_file,
args.reporting_geometry,
args.reporting_initial_value,
args.reporting_attribute,
args.reporting_attribute_type
)
|
[
"[email protected]"
] | |
ba683fa2671b6bcd12fa5fce3c7356675c5f5a60
|
db697271157368eb39ee9d9479d0c6a7eb9d06dd
|
/virtual/bin/easy_install
|
f22e90e735bfeae51bc3a727bc7716715ebadcd1
|
[
"MIT"
] |
permissive
|
amoskipz/instagram
|
5edaf03fd784c44fb325dc9f294fab41acc7bc4c
|
120e5ef1213567297689e04d5b8620508ce18fea
|
refs/heads/master
| 2023-04-09T13:09:27.379801 | 2021-04-10T17:29:14 | 2021-04-10T17:29:14 | 352,740,611 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 278 |
#!/home/moringa/Desktop/amoz/amosinstagram/virtual/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"[email protected]"
] | ||
0571f647ae3ab197917c302a532b8449f7afe44d
|
f504253210cec1c4ec6c3ea50a45564db7d6cd7f
|
/scripts/parse_pyi.py
|
d4b35524274e7a8a2fb6d2ba5b401434ca6486d5
|
[
"MIT"
] |
permissive
|
phil65/PrettyQt
|
b1150cb4dce982b9b8d62f38f56694959b720a3e
|
f00500d992d1befb0f2c2ae62fd2a8aafba7fd45
|
refs/heads/master
| 2023-08-30T21:00:08.905444 | 2023-08-17T12:24:45 | 2023-08-17T12:24:45 | 177,451,205 | 17 | 5 |
MIT
| 2020-08-15T22:21:18 | 2019-03-24T18:10:21 |
Python
|
UTF-8
|
Python
| false | false | 1,049 |
py
|
import pathlib
import sys
from typed_ast import ast3
def add_parents(tree):
for node in ast3.walk(tree):
for child in ast3.iter_child_nodes(node):
child.parent = node # type: ignore
def find_enums(tree):
for node in ast3.walk(tree):
if not isinstance(node, ast3.Assign):
continue
if node.type_comment is None:
continue
if "." not in node.type_comment:
continue
if not node.type_comment.startswith("'"):
continue
comment = node.type_comment.strip("'")
mod, cls = comment.rsplit(".", maxsplit=1)
assert len(node.targets) == 1
name = node.targets[0].id # type: ignore
yield (mod, cls, name)
def main():
for filename in sys.argv[1:]:
tree = ast3.parse(pathlib.Path(filename).read_text())
for mod, cls, name in find_enums(tree):
old = f"{mod}.{name}"
new = f"{mod}.{cls}.{name}"
print(f"{old} {new}")
if __name__ == "__main__":
main()
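# Hypothetical input sketch (editor's addition): given a stub assignment carrying
# a typed_ast type comment such as
#     AlignLeft = ...  # type: 'Qt.AlignmentFlag'
# find_enums yields ("Qt", "AlignmentFlag", "AlignLeft") and main() prints
# "Qt.AlignLeft Qt.AlignmentFlag.AlignLeft".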
|
[
"[email protected]"
] | |
59de5f4d684f1fcbb412201731cec5a17abe4081
|
08f42d68aaea64c986a532b86562365f2a6fe3b5
|
/src/addlayouttable.py
|
3e32ab12ae34fdcd43ccd0ad5cf36d01a2dfbff6
|
[
"MIT"
] |
permissive
|
sos82/micropython-microbit-v2
|
da65da0c4ae8a3e2189bb598f75fec43d4628007
|
a44573c623a9e935257c8db51a3f0c2e75ea10aa
|
refs/heads/master
| 2023-07-19T07:03:40.101099 | 2021-09-11T15:13:42 | 2021-09-11T15:13:42 | 402,835,132 | 0 | 0 |
MIT
| 2021-09-11T15:13:43 | 2021-09-03T16:35:34 | null |
UTF-8
|
Python
| false | false | 7,683 |
py
|
#!/usr/bin/env python3
"""
Add a flash layout table to a hex firmware for MicroPython on the micro:bit.
Usage: ./addlayouttable.py <firmware.hex> <firmware.map> [-o <combined.hex>]
Output goes to stdout if no filename is given.
The layout table is a sequence of 16-byte entries. The last entry contains the
header (including magic numbers) and is aligned to the end of a page such that
the final byte of the layout table is the final byte of the page it resides in.
This is so it can be quickly and easily searched for.
The layout table has the following format. All integer values are unsigned and
stored little endian.
 0x00  0x01  0x02  0x03  0x04  0x05  0x06  0x07  0x08  0x09  0x0a  0x0b  0x0c  0x0d  0x0e  0x0f
 ID    HT    REG_PAGE    REG_LEN                 HASH_DATA
 (additional regions)
 ...
 MAGIC1                  VERSION     TABLE_LEN   NUM_REG     PSIZE_LOG2  MAGIC2
The values are:
ID - 1 byte - region id for this entry, defined by the region
HT - 1 byte - hash type of the region hash data
REG_PAGE - 2 bytes - starting page number of the region
REG_LEN - 4 bytes - length in bytes of the region
HASH_DATA - 8 bytes - data for the hash of this region
HT=0: hash data is empty
HT=1: hash data contains 8 bytes of verbatim data
HT=2: hash data contains a 4-byte pointer to a string
MAGIC1 - 4 bytes - 0x597F30FE
VERSION - 2 bytes - table version (currently 1)
TABLE_LEN - 2 bytes - length in bytes of the table excluding this header row
NUM_REG - 2 bytes - number of regions
PSIZE_LOG2 - 2 bytes - native page size of the flash, log-2
MAGIC2 - 4 bytes - 0xC1B1D79D
"""
import argparse
import binascii
import struct
import sys
IHEX_TYPE_DATA = 0
IHEX_TYPE_EXT_LIN_ADDR = 4
NRF_PAGE_SIZE_LOG2 = 12
NRF_PAGE_SIZE = 1 << NRF_PAGE_SIZE_LOG2
class FlashLayout:
MAGIC1 = 0x597F30FE
MAGIC2 = 0xC1B1D79D
VERSION = 1
REGION_HASH_NONE = 0
REGION_HASH_DATA = 1
REGION_HASH_PTR = 2
def __init__(self):
self.data = b""
self.num_regions = 0
def add_region(
self, region_id, region_addr, region_len, region_hash_type, region_hash=None
):
# Compute/validate the hash data.
if region_addr % NRF_PAGE_SIZE != 0:
assert 0, region_addr
if region_hash_type == FlashLayout.REGION_HASH_NONE:
assert region_hash is None
region_hash = b"\x00" * 8
elif region_hash_type == FlashLayout.REGION_HASH_DATA:
assert len(region_hash) == 8
elif region_hash_type == FlashLayout.REGION_HASH_PTR:
region_hash = struct.pack("<II", region_hash, 0)
# Increase number of regions.
self.num_regions += 1
# Add the region data.
self.data += struct.pack(
"<BBHI8s",
region_id,
region_hash_type,
region_addr // NRF_PAGE_SIZE,
region_len,
region_hash,
)
def finalise(self):
# Add padding to data to align it to 16 bytes.
if len(self.data) % 16 != 0:
            self.data += b"\xff" * (16 - len(self.data) % 16)
# Add 16-byte "header" at the end with magic numbers and meta data.
self.data += struct.pack(
"<IHHHHI",
FlashLayout.MAGIC1,
FlashLayout.VERSION,
len(self.data),
self.num_regions,
NRF_PAGE_SIZE_LOG2,
FlashLayout.MAGIC2,
)
def make_ihex_record(addr, type, data):
record = struct.pack(">BHB", len(data), addr & 0xFFFF, type) + data
checksum = (-(sum(record))) & 0xFF
return ":%s%02X" % (str(binascii.hexlify(record), "utf8").upper(), checksum)
def parse_map_file(filename, symbols):
parse_symbols = False
with open(filename) as f:
for line in f:
line = line.strip()
if line == "Linker script and memory map":
parse_symbols = True
elif parse_symbols and line.startswith("0x00"):
line = line.split()
if len(line) >= 2 and line[1] in symbols:
symbols[line[1]] = int(line[0], 16)
def output_firmware(dest, firmware, layout_addr, layout_data):
# Output head of firmware.
for line in firmware[:-2]:
print(line, end="", file=dest)
# Output layout data.
print(
make_ihex_record(
0,
IHEX_TYPE_EXT_LIN_ADDR,
struct.pack(">H", layout_addr >> 16),
),
file=dest,
)
for i in range(0, len(layout_data), 16):
chunk = layout_data[i : min(i + 16, len(layout_data))]
print(
make_ihex_record(layout_addr + i, IHEX_TYPE_DATA, chunk),
file=dest,
)
# Output tail of firmware.
print(firmware[-2], end="", file=dest)
print(firmware[-1], end="", file=dest)
def main():
arg_parser = argparse.ArgumentParser(
description="Add UICR region to hex firmware for the micro:bit."
)
arg_parser.add_argument(
"-o",
"--output",
default=sys.stdout,
type=argparse.FileType("wt"),
help="output file (default is stdout)",
)
arg_parser.add_argument("firmware", nargs=1, help="input MicroPython firmware")
arg_parser.add_argument(
"mapfile",
nargs=1,
help="input map file",
)
args = arg_parser.parse_args()
# Read in the firmware from the given hex file.
with open(args.firmware[0], "rt") as f:
firmware = f.readlines()
# Parse the linker map file, looking for the following symbols.
symbols = {
key: None
for key in [
"_binary_softdevice_bin_start",
"__isr_vector",
"__etext",
"__data_start__",
"__data_end__",
"_fs_start",
"_fs_end",
"microbit_version_string",
]
}
parse_map_file(args.mapfile[0], symbols)
# Get the required symbol addresses.
sd_start = symbols["_binary_softdevice_bin_start"]
sd_end = symbols["__isr_vector"]
mp_start = symbols["__isr_vector"]
data_len = symbols["__data_end__"] - symbols["__data_start__"]
mp_end = symbols["__etext"] + data_len
mp_version = symbols["microbit_version_string"]
fs_start = symbols["_fs_start"]
fs_end = symbols["_fs_end"]
# Make the flash layout information table.
layout = FlashLayout()
layout.add_region(1, sd_start, sd_end - sd_start, FlashLayout.REGION_HASH_NONE)
layout.add_region(
2, mp_start, mp_end - mp_start, FlashLayout.REGION_HASH_PTR, mp_version
)
layout.add_region(3, fs_start, fs_end - fs_start, FlashLayout.REGION_HASH_NONE)
layout.finalise()
# Compute layout address.
layout_addr = (
((mp_end >> NRF_PAGE_SIZE_LOG2) << NRF_PAGE_SIZE_LOG2)
+ NRF_PAGE_SIZE
- len(layout.data)
)
if layout_addr < mp_end:
layout_addr += NRF_PAGE_SIZE
if layout_addr >= fs_start:
print("ERROR: Flash layout information overlaps with filesystem")
sys.exit(1)
# Print information.
if args.output is not sys.stdout:
fmt = "{:13} 0x{:05x}..0x{:05x}"
print(fmt.format("SoftDevice", sd_start, sd_end))
print(fmt.format("MicroPython", mp_start, mp_end))
print(fmt.format("Layout table", layout_addr, layout_addr + len(layout.data)))
print(fmt.format("Filesystem", fs_start, fs_end))
# Output the new firmware as a hex file.
output_firmware(args.output, firmware, layout_addr, layout.data)
if __name__ == "__main__":
main()
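# Illustrative only (not part of the original tool): a minimal sketch of how a consumer
# could locate and unpack the 16-byte layout-table header described in the docstring.
# `flash` is assumed to be a bytes object covering whole flash pages; the field order
# matches the "<IHHHHI" header packed in FlashLayout.finalise().
def find_layout_header(flash, page_size=NRF_PAGE_SIZE):
    for end in range(page_size, len(flash) + 1, page_size):
        magic1, version, table_len, num_reg, psize_log2, magic2 = struct.unpack(
            "<IHHHHI", flash[end - 16 : end]
        )
        if magic1 == FlashLayout.MAGIC1 and magic2 == FlashLayout.MAGIC2:
            # The table entries (plus padding) end right before this header.
            return end - 16 - table_len, table_len, num_reg
    return None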
|
[
"[email protected]"
] | |
67b2a9dd24d52509e01bfca824c18c8a25229ef6
|
52e2224538bfa7e69f9e0688dc49c53a247070d7
|
/app/api/serializers/offers.py
|
5a8539cca5bad81d59223205eb8855f955c825cc
|
[] |
no_license
|
averdier/epsi_my_learning_chain_api
|
2958f8ab6333e87f8b5d1a97f8d485361cdcba9d
|
ea85dced579f6285f3acd0edd0d64ead4f6f2332
|
refs/heads/master
| 2022-12-09T19:56:27.836637 | 2018-04-13T09:23:46 | 2018-04-13T09:23:46 | 129,227,310 | 1 | 0 | null | 2022-12-08T01:02:30 | 2018-04-12T09:32:18 |
Python
|
UTF-8
|
Python
| false | false | 1,666 |
py
|
# -*- coding: utf-8 -*-
from flask_restplus import fields
from .nested import facilitator_nested, api
offer_post_model = api.model('Offer POST model', {
'name': fields.String(required=True, min_length=4, description='Name'),
'tags': fields.List(fields.String(), required=False, description='Tags'),
'price': fields.Integer(required=True, min=0, description='Price'),
'description': fields.String(required=False, description='Description')
})
offer_patch_model = api.model('Offer PATCH model', {
'name': fields.String(required=False, min_length=4, description='Name'),
'tags': fields.List(fields.String(), required=False, description='Tags'),
'price': fields.Integer(required=False, min=0, description='Price'),
'description': fields.String(required=False, description='Description')
})
offer_minimal_model = api.model('Offer minimal model', {
'id': fields.String(required=True, description='Offer ID'),
'facilitator_id': fields.String(required=True, description='Facilitator ID', attribute=lambda o: o.facilitator.id),
'name': fields.String(required=True, description='Name'),
'tags': fields.List(fields.String(), required=True, description='Tags'),
'price': fields.Integer(required=True, description='Price')
})
offer_model = api.inherit('Offer model', offer_minimal_model, {
'description': fields.String(required=True, description='Description'),
'facilitator': fields.Nested(facilitator_nested, required=True, description='Facilitator')
})
offer_container = api.model('Offer container', {
'offers': fields.List(fields.Nested(offer_minimal_model), required=True, description='Offers list')
})
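# Illustrative only (not part of this module): a minimal sketch of how these serializers
# are typically consumed by a flask_restplus resource. The route, resource class and the
# data-access call are hypothetical; only `api` and the models above come from this file.
#
# from flask_restplus import Resource
#
# @api.route('/offers')
# class OfferCollection(Resource):
#     @api.marshal_with(offer_container)
#     def get(self):
#         offers = get_offers_somehow()  # hypothetical data-access call
#         return {'offers': offers}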
|
[
"[email protected]"
] | |
68bce261a32438af944cc7a39d301fd1bed0c9b1
|
e260e72c6d8f24ef250173025b2ecf129960cf03
|
/src/python/py27hash/key.py
|
34b6e925ab62923b6e110a518a7371a6aa5cb145
|
[
"MIT"
] |
permissive
|
neuml/py27hash
|
1cdce30b9480ff338ca0488c2d80a196fc783370
|
4d814de4ab616f33bb2d74c687e74fa57c399a56
|
refs/heads/master
| 2022-11-23T21:21:56.220330 | 2022-11-20T12:45:21 | 2022-11-20T12:45:21 | 223,510,843 | 11 | 3 |
MIT
| 2022-03-27T14:46:02 | 2019-11-23T00:57:33 |
Python
|
UTF-8
|
Python
| false | false | 5,425 |
py
|
"""
Compatibility methods to support Python 2.7 style key iteration in Python 3.X+
This is designed for compatibility not performance.
"""
import ctypes
# pylint: disable = E0401
from .hash import Hash
class Keys(object):
"""
Compatibility class to support Python 2.7 style iteration in Python 3.X+
Logic ported from the 2.7 Python branch: cpython/Objects/dictobject.c
Logic ported from the 2.7 Python branch: cpython/Objects/setobject.c
"""
# Min dict size
MINSIZE = 8
# Hash collisions
PERTURB_SHIFT = 5
def __init__(self):
"""
Initializes a keys object.
"""
self.keylist = []
self.keysort = None
# Python 2 dict default size
self.mask = Keys.MINSIZE - 1
def __setstate__(self, state):
"""
Overrides default pickling object to force re-adding all keys and match Python 2.7 deserialization logic.
Args:
state: input state
"""
self.__dict__ = state
keys = self.keys()
# Clear keys and re-add to match deserialization logic
self.__init__()
for k in keys:
self.add(k)
def __iter__(self):
"""
Default iterator.
Returns:
iterator
"""
return iter(self.keys())
def keys(self):
"""
Returns keys ordered using Python 2.7's iteration algorithm.
Method: static PyDictEntry *lookdict(PyDictObject *mp, PyObject *key, register long hash)
Returns:
list of keys
"""
if not self.keysort:
keys = []
hids = set()
for k in self.keylist:
# C API uses unsigned values
h = ctypes.c_size_t(Hash.hash(k)).value
i = h & self.mask
hid = i
perturb = h
while hid in hids:
i = (i << 2) + i + perturb + 1
hid = i & self.mask
perturb >>= Keys.PERTURB_SHIFT
keys.append((hid, k))
hids.add(hid)
# Cache result - performance - clear if more keys added
self.keysort = [v for (k, v) in sorted(keys, key=lambda x: x[0])]
return self.keysort
def add(self, key):
"""
Called each time a new item is inserted. Tracks via insertion order and will maintain the same order
as a dict in Python 2.7.
Method: static int dict_set_item_by_hash_or_entry(register PyObject *op, PyObject *key, long hash,
PyDictEntry *ep, PyObject *value)
Args:
key: key to add
"""
# Add key to list. If this is a replace/update then size won't change.
if key and key not in self.keylist:
# Append key to list
self.keylist.append(key)
# Clear cached keys
self.keysort = None
# Resize dict if 2/3 capacity
if len(self.keylist) * 3 >= ((self.mask + 1) * 2):
# Reset key list to simulate the dict resize + copy operation
self.keylist = self.keys()
self.keysort = None
self.setMask()
def remove(self, key):
"""
Remove a key from the backing list.
Args:
key: key to remove
"""
if key in self.keylist:
# Remove key from list
self.keylist.remove(key)
# Clear cached keys
self.keysort = None
def merge(self, d):
"""
Merges keys from an existing iterable into this key list.
Method: int PyDict_Merge(PyObject *a, PyObject *b, int override)
Args:
d: input dict
"""
# PyDict_Merge initial merge size is double the size of the current + incoming dict
if (len(self.keylist) + len(d)) * 3 >= (self.mask + 1) * 2:
self.setMask((len(self.keylist) + len(d)) * 2)
# Copy actual keys
for k in d:
self.add(k)
def copy(self):
"""
Makes a copy of self.
Method: PyObject *PyDict_Copy(PyObject *o)
Returns:
copy of self
"""
# Copy creates a new object and merges keys in
new = Keys()
new.merge(self.keys())
return new
def pop(self):
"""
Pops the top element from the sorted keys if it exists. Returns None otherwise.
Method: static PyObject *dict_popitem(PyDictObject *mp)
Return:
top element or None if Keys is empty
"""
if self.keylist:
# Pop the top element
value = self.keys()[0]
self.remove(value)
return value
return None
def setMask(self, request=None):
"""
Key based on the total size of this dict. Matches ma_mask in Python 2.7's dict.
Method: static int dictresize(PyDictObject *mp, Py_ssize_t minused)
"""
if not request:
length = len(self.keylist)
# Python 2 dict increases by a factor of 4 for small dicts, 2 for larger ones
request = length * (2 if length > 50000 else 4)
newsize = Keys.MINSIZE
while newsize <= request:
newsize <<= 1
self.mask = newsize - 1
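# Illustrative only (not part of the library): a minimal sketch showing Keys reproducing
# Python 2.7 dict iteration order on Python 3. Keys come back in 2.7 hash-table order
# rather than insertion order.
if __name__ == "__main__":
    keys = Keys()
    for k in ("alpha", "beta", "gamma", "delta"):
        keys.add(k)
    print(list(keys))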
|
[
"[email protected]"
] | |
fc451ae5171ff6413eb5371ac7f7c792cf544866
|
e874e3b4312b2beebaa42fa1489b50c618055190
|
/venv/Lib/site-packages/onyx/core/datatypes/rdate.py
|
96d0f5c2af82ec4900dac49253b490f6f1a8a386
|
[] |
no_license
|
CarlosDinart/PUC-SP
|
611a9acb6a82b7db2174d2d439b5666db48a530e
|
5f5f1ea4b9c55c7d20b2dcd92c461b3d8ebbb664
|
refs/heads/master
| 2023-01-23T06:46:42.492764 | 2020-12-09T19:41:01 | 2020-12-09T19:41:01 | 320,058,535 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,973 |
py
|
###############################################################################
#
# Copyright: (c) 2015 Carlo Sbraccia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###############################################################################
from .date import Date
from .holiday_cal import HolidayCalendar
from dateutil.relativedelta import relativedelta, MO, WE, FR
import datetime
import re
__all__ = ["RDate"]
QUARTER_FIRST_MTH = [1, 1, 1, 4, 4, 4, 7, 7, 7, 10, 10, 10]
SPLITTER = re.compile(r"([\+,\-]\d*\w+)")
OPERANDS = {"+", "-"}
###############################################################################
class RDate(object):
"""
A date shift object that can be added to Dates to generate shifted dates.
"""
__slots__ = ("date_rule", "calendar")
# -------------------------------------------------------------------------
def __init__(self, date_rule, calendar=None):
"""
Inputs:
date_rule - a string specifying relative shift (see below for valid
date rules).
calendar - a holiday calendar used to identify business days
Rule definitions:
d = add calendar day
b = add business day
w = add calendar week
m = add calendar month
y = add calendar year
c = go to the required day in the month
e = go to end of month (ignores num)
J = go to first calendar day of month (ignores num)
M = go to closest Monday as specified by num
W = go to closest Wednesday as specified by num
F = go to closest Friday as specified by num
q = go to beginning of the quarter (ignores num)
Q = go to end of the quarter (ignores num)
A = go to beginning of the year (ignores num)
E = go to end of the year (ignores num)
"""
# --- use parent class setattr because RDate is implemented as an
# immutable class
super().__setattr__("date_rule", date_rule)
super().__setattr__("calendar", calendar or HolidayCalendar())
# -------------------------------------------------------------------------
def __setattr__(self, attr, value):
raise AttributeError("attribute '{0:s}' of RDate is not settable "
"as RDate is an immutable class".format(attr))
# -------------------------------------------------------------------------
def apply_rule(self, d):
# --- rule processing. If no operator is defined assume it's "+"
if self.date_rule[0] in OPERANDS:
atomic = SPLITTER.split(self.date_rule)[1::2]
else:
atomic = SPLITTER.split("+" + self.date_rule)[1::2]
# --- iteratively apply each atomic rule
for rule in atomic:
op = rule[0:-1]
r = rule[-1]
if op in OPERANDS:
op += "1"
# --- look for the proper rule to apply
if r == "d":
d += relativedelta(days=int(op))
elif r == "b":
nb = int(op[1:])
op1 = int(op[0] + "1")
if nb == 0 and self.calendar.is_holiday(d):
# --- go to the next (or previous) business day only if
# d is not already a business day
nb = 1
for i in range(nb):
d += relativedelta(days=op1)
while self.calendar.is_holiday(d):
d += relativedelta(days=op1)
elif r == "w":
d += relativedelta(weeks=int(op))
elif r == "m":
d += relativedelta(months=int(op))
elif r == "y":
d += relativedelta(years=int(op))
elif r == "c":
d += relativedelta(day=int(op))
elif r == "e":
d += relativedelta(day=31)
elif r == "J":
d += relativedelta(day=1)
elif r == "M":
d += relativedelta(weekday=MO(int(op)))
elif r == "W":
d += relativedelta(weekday=WE(int(op)))
elif r == "F":
d += relativedelta(weekday=FR(int(op)))
elif r == "q":
d = d.replace(day=1, month=QUARTER_FIRST_MTH[d.month-1])
elif r == "Q":
d = d.replace(day=1, month=QUARTER_FIRST_MTH[d.month-1]+2)
d += relativedelta(day=31)
elif r == "A":
d = d.replace(day=1, month=1)
elif r == "E":
d = d.replace(day=31, month=12)
else:
raise NameError("Atomic rule {0:s} is unknown. "
"Full rule is {1:s}".format(r, rule))
# --- conversion to Date is needed here because applying a
# relativedelta to a Date returns a datetime object
return Date.parse(d)
# -------------------------------------------------------------------------
# relative date algebra
def __radd__(self, date):
        # --- check against the superclass datetime.datetime
if not isinstance(date, (datetime.date, datetime.datetime)):
raise ValueError("RDate can only be applied to a Date. "
"{0!s} was passed instead".format(date.__class__))
return self.apply_rule(date)
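# Illustrative only (not part of the library): a minimal sketch of the rule algebra,
# applied to a plain datetime.date (RDate.__radd__ accepts any datetime.date/datetime).
# "+1m+e" means "add one calendar month, then go to the end of that month".
if __name__ == "__main__":
    print(datetime.date(2015, 1, 30) + RDate("+1m+e"))  # end of February 2015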
|
[
"[email protected]"
] | |
6828a845e8b4a33abe9c434db3f96ee98f8001fa
|
36b624c0b7f0e691772f7521695c02f0709f9f89
|
/day8.py
|
cc9d00263a5a71ed2da284db2e9df17031b7dd4f
|
[] |
no_license
|
alex-huff/advent-of-code-2020
|
4ee30fdcd6b67aceb0c0fb919de9a18f61d9987c
|
701cfcba1c952710c0bf0b9336f670141a9ac276
|
refs/heads/main
| 2023-02-02T13:51:37.722815 | 2020-12-22T01:32:21 | 2020-12-22T01:32:21 | 323,081,866 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,564 |
py
|
def getValueBeforeLoop(code):
acc = 0
pc = 0
executionSet = set()
finished = False
while True:
if pc == len(code) - 1:
finished = True
break
if pc in executionSet:
break
inst = code[pc][0]
value = code[pc][1]
executionSet.add(pc)
if inst == NOP:
pc += 1
elif inst == ACC:
acc += value
pc += 1
elif inst == JMP:
pc += value
return (acc, finished)
def getInstruction(line, iSet):
operation = iSet.index(line[:line.index(' ')])
value = int(line[line.index(' ') + 1:])
return [operation, value]
raw_input = []
NOP = 0
ACC = 1
JMP = 2
instructionSet = ['nop', 'acc', 'jmp']
with open('input/day8input.txt') as file:
raw_input = [line.rstrip() for line in file]
code = [getInstruction(line, instructionSet) for line in raw_input]
# part 1
print(getValueBeforeLoop(code))
# part 2
for i, operation in enumerate(code):
if operation[0] == NOP:
operation[0] = JMP
result = getValueBeforeLoop(code)
if result[1] == True:
print(i, result[0], operation[0], operation[1])
break
else:
# revert
operation[0] = NOP
elif operation[0] == JMP:
operation[0] = NOP
result = getValueBeforeLoop(code)
if result[1] == True:
print(i, result[0], operation[0], operation[1])
break
else:
# revert
operation[0] = JMP
|
[
"[email protected]"
] | |
301d507040d644d6ddc8e77d11c75c42c9c382f2
|
81fe7f2faea91785ee13cb0297ef9228d832be93
|
/HackerRank/Contests/101Hack42/cutting_paper_squares.py
|
fc51e8c2e28f90db86021f38bf68df6bf3be5567
|
[] |
no_license
|
blegloannec/CodeProblems
|
92349c36e1a35cfc1c48206943d9c2686ea526f8
|
77fd0fa1f1a519d4d55265b9a7abf12f1bd7d19e
|
refs/heads/master
| 2022-05-16T20:20:40.578760 | 2021-12-30T11:10:25 | 2022-04-22T08:11:07 | 54,330,243 | 5 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 143 |
py
|
#!/usr/bin/env python
import sys
# any way of cutting is minimal and uses n*m-1 cuts
n,m = map(int,sys.stdin.readline().split())
print n*m-1
|
[
"[email protected]"
] | |
300abebbed2333e357357a47e73f19850524efd9
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/392/usersdata/310/71048/submittedfiles/formula.py
|
ea9ddc63827097528e7fa6c3641fd8e29889c94f
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 208 |
py
|
# -*- coding: utf-8 -*-
# STARTING
p = float(input('Digite o valor de p: '))
i = float(input('Digite o valor de i: '))
n = float(input('Digite o valor de n: '))
v = p*((((1+i)**n)-1)/i)
print ('%.2f' %v )
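# Worked example (illustrative only): with p = 100, i = 0.05 and n = 10,
# v = 100 * (((1 + 0.05) ** 10 - 1) / 0.05) ≈ 1257.79, i.e. the future value of
# 10 periodic deposits of 100 at 5% interest per period.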
|
[
"[email protected]"
] | |
513c734e5f42fa3c0906eb3309cba7ef169d6c1b
|
a7da58ad91b007b3650003708eb91928f1e3684a
|
/bt5/erp5_pdm/SkinTemplateItem/portal_skins/erp5_pdm/SupplyLine_init.py
|
c4b6e6d09c91538afb94bb6ecf72091943ac25a9
|
[] |
no_license
|
jgpjuniorj/j
|
042d1bd7710fa2830355d4312a6b76103e29639d
|
dc02bfa887ffab9841abebc3f5c16d874388cef5
|
refs/heads/master
| 2021-01-01T09:26:36.121339 | 2020-01-31T10:34:17 | 2020-02-07T04:39:18 | 239,214,398 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 569 |
py
|
# XXX Set a resource for the supply line inside a resource
parent_value = context.getParentValue()
if parent_value.getPortalType() in context.getPortalResourceTypeList():
context.setResourceValue(parent_value)
# Predicate ?
context.setMappedValuePropertyList([
'base_price', 'additional_price',
'discount_ratio', 'exclusive_discount_ratio',
'surcharge_ratio', 'variable_additional_price',
'non_discountable_additional_price',
'priced_quantity', 'base_unit_price',
])
|
[
"[email protected]"
] | |
2d157e6b043c49bbf4392ba1010da1ab43617f94
|
1698fe3ff15a6737c70501741b32b24fe68052f4
|
/py-elasticsearch-django-master/spider/ESearch/spiders/haoyang_spider.py
|
a227d4b4b41603f8a992a695342ceb534df4a4bb
|
[] |
no_license
|
menhswu/djangoapps
|
4f3718244c8678640af2d2a095d20a405e337884
|
039a42aa9d1537e7beb4071d86bea7a42253d8b3
|
refs/heads/master
| 2023-03-04T03:56:01.070921 | 2021-01-28T07:35:02 | 2021-01-28T07:35:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,937 |
py
|
# -*- coding:utf-8 -*-
import scrapy
import re
import urllib2
from scrapy.http import Request
from scrapy import Selector
from ESearch.items import XiangmuItem
from ESearch.utils.common import get_md5
# 32406
class DmozSpider(scrapy.Spider):
name = "haoyang"
start_urls = []
main_url = "http://www.9lizhi.com"
def start_requests(self):
file_object = open(r'haoyang_url.csv', 'r')
try:
for line in file_object:
x = line.strip()
self.start_urls.append(x)
for url in self.start_urls:
yield self.make_requests_from_url(url)
finally:
file_object.close()
def parse(self, response):
item = XiangmuItem()
item["book_name"] = ''
item["book_author"] = ''
item["book_type"] = ''
item["book_format"] = ''
item["book_time"] = ''
item["book_url"] = ''
item["book_size"] = ''
item["book_downl_url"] = ''
item["book_source"] = ''
item["book_intro"] = ''
item["book_content"] = ''
item["book_zip_pswd"] = ''
item["book_chinese"] = ''
item["book_id"] = ''
selector = Selector(response)
is_lists_page = selector.xpath('//ul[@id="resultsContainer"]')
if is_lists_page:
info_lists = is_lists_page.xpath('li/div[@class="item_title"]/strong/h2/a/@href').extract()
for each in info_lists:
yield Request(each, callback=self.parse)
page_lists = is_lists_page.xpath('//select[@name="select"]/option/@value').extract()
for each_page in page_lists[1:-1]:
yield Request(self.main_url + each_page, callback=self.parse)
pass
is_info_page = selector.xpath('//div[@id="detail"]')
if is_info_page:
item['book_url'] = response.url
item['book_id'] = get_md5(response.url)
item['book_downl_url'] = response.url
type = selector.xpath('//div[@class="posi"]/a/text()').extract()
type_url = selector.xpath('//div[@class="posi"]/a/@href').extract()
if "http://www" in type_url[-1]:
item['book_type'] = type[-2]
else:
item['book_type'] = type[-1]
information = is_info_page.xpath('div[@class="tb-detail-hd"]')
item['book_name'] = information.xpath('h1/text()').extract()
time = information.xpath('li[@class="dated"]/span[@class="datetime"]/text()').extract()
time = ''.join(time).split(':')[-1]
item['book_time'] = time
author = information.xpath('li[@class="dated"]/span[@class="author"]/text()').extract()
item['book_author'] = ''.join(author).replace('\r', '').replace('\n', '')
yield item
|
[
"[email protected]"
] | |
44b3f8b9bf6336102b52df080e47ad6e0a0e1c8d
|
297497957c531d81ba286bc91253fbbb78b4d8be
|
/third_party/python/gyp/test/win/gyptest-quoting-commands.py
|
c6e3167e76c582ad10c5852afaa2c21f62439533
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
marco-c/gecko-dev-comments-removed
|
7a9dd34045b07e6b22f0c636c0a836b9e639f9d3
|
61942784fb157763e65608e5a29b3729b0aa66fa
|
refs/heads/master
| 2023-08-09T18:55:25.895853 | 2023-08-01T00:40:39 | 2023-08-01T00:40:39 | 211,297,481 | 0 | 0 |
NOASSERTION
| 2019-09-29T01:27:49 | 2019-09-27T10:44:24 |
C++
|
UTF-8
|
Python
| false | false | 565 |
py
|
"""
Make sure batch files run as actions. Regression test for previously missing
trailing quote on command line. cmd typically will implicitly insert a missing
quote, but if the command ends in a quote, it will not insert another, so the
command can sometimes become unterminated.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'batch-file-action'
test.run_gyp('batch-file-action.gyp', chdir=CHDIR)
test.build('batch-file-action.gyp', test.ALL, chdir=CHDIR)
test.pass_test()
|
[
"[email protected]"
] | |
e60bc54cc535f4ab73531ba9f4f5eac45fce071c
|
09145b3b07273e0e064a855cf2302220cebcf181
|
/textwrap_ex.py
|
95b08fe738e17f49e6c04d9e5cc134b0a91c315a
|
[] |
no_license
|
01x01/python3-standard-library
|
7fa762a12cbcb3535bd8b31128bd9c3aed167e10
|
1af424a04d3f16abf1c6bc42abf80ae357e35920
|
refs/heads/master
| 2020-06-24T12:23:38.563609 | 2019-08-12T09:42:34 | 2019-08-12T09:42:34 | 168,808,293 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,217 |
py
|
# coding: utf-8
# Text paragraph formatting.
# Main feature: programmatic functionality similar to the wrapping/filling
# features found in many text editors.
# Sample text
sample_text = '''The textwrap module can be used to format text for output in
situations where pretty-printing is desired. It offers
programmatic functionality similar to the paragraph wrapping
or filling features found in many text editors.
'''
# Summary of textwrap functions:
# wrap(text, width=70, **kw)    returns a list, one wrapped line per element
# fill(text, width=70, **kw)    returns a single wrapped string; initial_indent indents the first line
# shorten(text, width, **kw)    collapses and truncates text to fit width (summary)
# dedent(text)                  removes common leading whitespace
# indent(text, prefix, predicate=None)   prepends prefix to selected lines
# wrap
import textwrap
t1 = textwrap.wrap(sample_text)
print(t1)
# fill
t2 = textwrap.fill(sample_text,initial_indent="*"*4)
print(t2)
# shorten
t3 = textwrap.shorten(sample_text,width=10)
print(t3) # The [...]
# dedent
dedent_text = textwrap.dedent(sample_text)
print(dedent_text)
# indent
final = textwrap.indent(sample_text,'>')
print(final)
for width in [45,80]:
print("{} Columns is : \n".format(width))
t = textwrap.fill(sample_text,width=width)
print(t)
t5 = textwrap.fill(sample_text,subsequent_indent="*")
print(t5)
|
[
"[email protected]"
] | |
b741fb5ad71c5f866583c59b99471308f3687b26
|
f3fb46ec8167c3d7e451265a76e1646aef78233f
|
/world/stat_checks/utils.py
|
3d228a15fb3d369bed4b3c54c83fd1a965011308
|
[
"MIT"
] |
permissive
|
Arx-Game/arxcode
|
d1196941db4b551bb8ec96024241787cf4f34af3
|
363a1f14fd1a640580a4bf4486a1afe776757557
|
refs/heads/stable_orphan
| 2023-08-03T04:27:24.388330 | 2023-07-29T15:10:38 | 2023-07-29T15:10:38 | 144,421,010 | 52 | 45 |
MIT
| 2023-08-19T00:52:23 | 2018-08-11T22:06:07 |
Python
|
UTF-8
|
Python
| false | false | 685 |
py
|
"""
Just a few utilities. Should NOT import anything in global scope to avoid
circular imports.
"""
def get_check_by_name(name: str):
"""
Convenience method to avoid worrying about circular imports when
fetching checks.
"""
from world.stat_checks.models import StatCheck
check = StatCheck.get_instance_by_name(name)
if not check:
raise StatCheck.DoesNotExist(f"No check exists by name '{name}'")
return check
def get_check_maker_by_name(name: str, character, **kwargs):
from world.stat_checks.check_maker import DefinedCheckMaker
return DefinedCheckMaker(
character=character, check=get_check_by_name(name), **kwargs
)
|
[
"[email protected]"
] | |
e9f3f9f51a547f3f2aa65e33127282f566ad08a5
|
39b8aa964883b2bde4349e0c9c38e3233c310548
|
/src/Implement Queue using Stacks.py
|
421724c0e798d0e25106167e09fd34fd1f92ff3e
|
[] |
no_license
|
orifake/leetcode-python
|
053b82491e0b8d6197dd12d92eec5883211285db
|
8e375ebebe0a0285efefc33ed61afb22f41d0c75
|
refs/heads/master
| 2023-03-09T14:32:17.833456 | 2021-02-26T16:09:31 | 2021-02-26T16:09:31 | 264,466,829 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 846 |
py
|
class MyQueue:
def __init__(self):
"""
Initialize your data structure here.
"""
self.queue = []
def push(self, x: int) -> None:
"""
Push element x to the back of queue.
"""
return self.queue.append(x)
def pop(self) -> int:
"""
Removes the element from in front of queue and returns that element.
"""
return self.queue.pop(0)
def peek(self) -> int:
"""
Get the front element.
"""
return self.queue[0]
def empty(self) -> bool:
"""
Returns whether the queue is empty.
"""
return len(self.queue) == 0
# Your MyQueue object will be instantiated and called as such:
# obj = MyQueue()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.peek()
# param_4 = obj.empty()
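# Illustrative only (not part of the accepted solution above): a minimal sketch of the
# two-stack approach the problem title refers to, giving amortized O(1) pop/peek by
# reversing elements from an input stack into an output stack only when needed.
class TwoStackQueue:
    def __init__(self):
        self.inbox = []   # stack that receives pushes
        self.outbox = []  # stack that serves pops/peeks in FIFO order
    def push(self, x: int) -> None:
        self.inbox.append(x)
    def _shift(self) -> None:
        # Refill the outbox (reversing order) only when it is empty.
        if not self.outbox:
            while self.inbox:
                self.outbox.append(self.inbox.pop())
    def pop(self) -> int:
        self._shift()
        return self.outbox.pop()
    def peek(self) -> int:
        self._shift()
        return self.outbox[-1]
    def empty(self) -> bool:
        return not self.inbox and not self.outbox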
|
[
"[email protected]"
] | |
800bfc292b541e358ca68c9e6c326a8a80714489
|
6581a1c0b04af75ab7d386597ec436bd4937b6df
|
/pystache/tests/main.py
|
7342c91712b8528bdc016eb6c3da6dde4a999216
|
[
"MIT"
] |
permissive
|
trenchmortar/pystache
|
f3ab3263ca0e990176306d3a0de9a4fba441c78f
|
cc262abf19cd90e34390d5ddb5db30d6f04620fa
|
refs/heads/master
| 2020-04-23T00:33:47.600879 | 2012-04-26T05:49:19 | 2012-04-26T05:49:19 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,685 |
py
|
# coding: utf-8
"""
Exposes a run_tests() function that runs all tests in the project.
This module is for our test console script.
"""
import os
import sys
import unittest
from unittest import TestProgram
import pystache
from pystache.tests.common import PACKAGE_DIR, PROJECT_DIR, SPEC_TEST_DIR, UNITTEST_FILE_PREFIX
from pystache.tests.common import get_module_names
from pystache.tests.doctesting import get_doctests
from pystache.tests.spectesting import get_spec_tests
# If this command option is present, then the spec test and doctest directories
# will be inserted if not provided.
FROM_SOURCE_OPTION = "--from-source"
def run_tests(sys_argv):
"""
Run all tests in the project.
Arguments:
sys_argv: a reference to sys.argv.
"""
should_source_exist = False
spec_test_dir = None
project_dir = None
if len(sys_argv) > 1 and sys_argv[1] == FROM_SOURCE_OPTION:
should_source_exist = True
sys_argv.pop(1)
# TODO: use logging module
print "pystache: running tests: expecting source: %s" % should_source_exist
try:
# TODO: use optparse command options instead.
spec_test_dir = sys_argv[1]
sys_argv.pop(1)
except IndexError:
if should_source_exist:
spec_test_dir = SPEC_TEST_DIR
try:
# TODO: use optparse command options instead.
project_dir = sys_argv[1]
sys_argv.pop(1)
except IndexError:
if should_source_exist:
project_dir = PROJECT_DIR
if len(sys_argv) <= 1 or sys_argv[-1].startswith("-"):
# Then no explicit module or test names were provided, so
# auto-detect all unit tests.
module_names = _discover_test_modules(PACKAGE_DIR)
sys_argv.extend(module_names)
if project_dir is not None:
# Add the current module for unit tests contained here.
sys_argv.append(__name__)
_PystacheTestProgram._text_doctest_dir = project_dir
_PystacheTestProgram._spec_test_dir = spec_test_dir
SetupTests.project_dir = project_dir
# We pass None for the module because we do not want the unittest
# module to resolve module names relative to a given module.
# (This would require importing all of the unittest modules from
# this module.) See the loadTestsFromName() method of the
# unittest.TestLoader class for more details on this parameter.
_PystacheTestProgram(argv=sys_argv, module=None)
    # No need to return since unittest.main() exits.
def _discover_test_modules(package_dir):
"""
Discover and return a sorted list of the names of unit-test modules.
"""
def is_unittest_module(path):
file_name = os.path.basename(path)
return file_name.startswith(UNITTEST_FILE_PREFIX)
names = get_module_names(package_dir=package_dir, should_include=is_unittest_module)
# This is a sanity check to ensure that the unit-test discovery
# methods are working.
if len(names) < 1:
raise Exception("No unit-test modules found--\n in %s" % package_dir)
return names
class SetupTests(unittest.TestCase):
"""Tests about setup.py."""
project_dir = None
def test_version(self):
"""
Test that setup.py's version matches the package's version.
"""
original_path = list(sys.path)
sys.path.insert(0, self.project_dir)
try:
from setup import VERSION
self.assertEqual(VERSION, pystache.__version__)
finally:
sys.path = original_path
# The function unittest.main() is an alias for unittest.TestProgram's
# constructor. TestProgram's constructor calls self.runTests() as its
# final step, which expects self.test to be set. The constructor sets
# the self.test attribute by calling one of self.testLoader's "loadTests"
# methods prior to calling self.runTests(). Each loadTest method returns
# a unittest.TestSuite instance. Thus, self.test is set to a TestSuite
# instance prior to calling runTests().
class _PystacheTestProgram(TestProgram):
"""
Instantiating an instance of this class runs all tests.
"""
def runTests(self):
# self.test is a unittest.TestSuite instance:
# http://docs.python.org/library/unittest.html#unittest.TestSuite
tests = self.test
if self._text_doctest_dir is not None:
doctest_suites = get_doctests(self._text_doctest_dir)
tests.addTests(doctest_suites)
if self._spec_test_dir is not None:
spec_testcases = get_spec_tests(self._spec_test_dir)
tests.addTests(spec_testcases)
TestProgram.runTests(self)
|
[
"[email protected]"
] | |
413503c12f4dc6734628644e7c8b0fb1e3e15539
|
aeec646a9a2feb6fbaac31d4548d9aa09ad125e3
|
/peer_module.py
|
879a6662404995a3a8da03457bae8f7f63f7be5e
|
[
"MIT"
] |
permissive
|
hslee1539/p2p
|
c0a9798e6da54029373ddf3d2b74ff30dc27e567
|
c472271eff409ef345f29ef32f562a5f5e00d3ba
|
refs/heads/master
| 2020-07-18T14:45:33.260168 | 2019-09-10T07:45:45 | 2019-09-10T07:45:45 | 206,264,727 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,674 |
py
|
import socket
import threading
import random
from typing import Callable
import p2p
class Peer:
""""""
ip : str
controlPort : int
connectionPorts : list
service : str
peerList : list
maxConnection : int
serverThreads : list
clientThreads : list
serverState : list
running : bool
controlSocket : socket.socket
controlThread : threading.Thread
def __init__(self, service : str, controlPort : int, connectionPorts : list, peerList = []):
self.peerList = peerList
self.service = service
self.controlPort = controlPort
self.connectionPorts = connectionPorts
self.maxConnection = len(connectionPorts)
self.ip = socket.gethostbyname(socket.getfqdn())
self.serverState = [False] * self.maxConnection
def start(self):
""""""
self.running = True
self.controlThread = threading.Thread(target=self._controlServer)
self.controlThread.start()
        self.serverThreads = list(self._socketThreadGenerator(self._server))
self.clientThreads = list(self._socketThreadGenerator(self._client))
for serverThread in self.serverThreads:
serverThread.start()
for clientThread in self.clientThreads:
clientThread.start()
    def _socketThreadGenerator(self, target: Callable):
for index in range(self.maxConnection):
yield threading.Thread(target=target, args=(index,))
def _findSleepServerIndex(self):
for i in range(self.maxConnection):
if(self.serverState[i] == False):
return i
return self.maxConnection
def _controlServer(self):
"""컨트롤 서버가 연결 유지가 가능한 포트를 클라이언트에 알려주도록 운영합니다."""
with socket.socket() as sock:
sock.bind((self.ip, self.controlPort))
sock.settimeout(1)
sock.listen()
while(self.running):
try:
clientSocket, address = sock.accept()
except socket.timeout:
continue
clientSocket : socket.socket
try:
clientSocket.sendall(self.service.encode())
retval = clientSocket.recv(1024)
if (self.service == retval.decode()):
clientSocket.sendall(str(self.connectionPorts[self._findSleepServerIndex()]).encode())
except socket.timeout:
pass
finally:
clientSocket.close()
def _server(self, index : int):
""""""
with socket.socket() as sock:
sock.bind((self.ip, self.connectionPorts[index]))
sock.settimeout(1)
sock.listen()
while(self.running):
try:
self.serverState[index] = False
clientSocket, address = sock.accept()
except socket.timeout:
continue
clientSocket : socket.socket
self.serverState[index] = True
try:
clientSocket.sendall(self.service.encode())
                    retval = clientSocket.recv(1024)
                    if (self.service == retval.decode()):
                        pass  # TODO: hand the accepted connection over to the service (body missing in the original)
except socket.timeout:
pass
finally:
clientSocket.close()
def _serverConnect(self):
        while (self.running):
            pass  # TODO: outbound connection logic was left unimplemented in the original
def _client(self, index : int):
""""""
|
[
"[email protected]"
] | |
7f53a02ffe54ce6ac59ddf141ab26147d828a8de
|
c79779a1233e95858499143d717a41205932c53d
|
/pypi_practices/check_readme.py
|
87c4304a51e43f77a2ba84c739eb4ed25c7ac8d5
|
[
"MIT"
] |
permissive
|
asottile-archive/pypi_practices
|
a8915fca09619f741f385c000bc98d84f9fd515f
|
a4da562c471198dd35806c52016fac44bb46c08d
|
refs/heads/master
| 2021-09-15T02:01:34.023997 | 2018-05-24T00:46:27 | 2018-05-24T00:46:27 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 668 |
py
|
from __future__ import print_function
import os.path
from pypi_practices.errors import FileValidationError
from pypi_practices.make_entry import make_entry
def check_readme(cwd):
readme_path = os.path.join(cwd, 'README.md')
if not os.path.exists(readme_path):
raise FileValidationError(
'README.md',
'File does not exist.',
is_auto_fixable=True,
)
# TODO: attempt to get project name from config
# TODO: attempt to get project name from tox.ini
# TODO: attempt to get project name from setup.py
return 0
entry = make_entry(check_readme)
if __name__ == '__main__':
exit(entry())
|
[
"[email protected]"
] |