blob_id (stringlengths 40-40) | directory_id (stringlengths 40-40) | path (stringlengths 3-616) | content_id (stringlengths 40-40) | detected_licenses (listlengths 0-112) | license_type (stringclasses 2) | repo_name (stringlengths 5-115) | snapshot_id (stringlengths 40-40) | revision_id (stringlengths 40-40) | branch_name (stringclasses 777) | visit_date (timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id (int64, 4.92k to 681M, nullable) | star_events_count (int64, 0 to 209k) | fork_events_count (int64, 0 to 110k) | gha_license_id (stringclasses 22) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable) | gha_language (stringclasses 149) | src_encoding (stringclasses 26) | language (stringclasses 1) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3 to 10.2M) | extension (stringclasses 188) | content (stringlengths 3-10.2M) | authors (listlengths 1-1) | author_id (stringlengths 1-132)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
847306585ef3a46f500482752ba79115adf8c449 | 9d454ae0d5dd1d7e96e904ced80ca502019bb659 | /1550_threeConsecutiveOdds.py | 94c3d3a5aabfa1262828a5bafb4860615440bd8a | []
| no_license | zzz686970/leetcode-2018 | dad2c3db3b6360662a90ea709e58d7facec5c797 | 16e4343922041929bc3021e152093425066620bb | refs/heads/master | 2021-08-18T08:11:10.153394 | 2021-07-22T15:58:52 | 2021-07-22T15:58:52 | 135,581,395 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | def threeConsecutiveOdds(arr):
return any([False if i < 2 else arr[i-2] & arr[i-1] & arr[i] & 1 for i in range(len(arr))]) | [
"[email protected]"
]
| |
5909b8a429dde3c3db85365a4a2fcafe8504a73c | 6febd920ced70cbb19695801a163c437e7be44d4 | /leetcode_oj/string/strStr.py | b0bc9a66ec3e9adf67f316f37ee2da101b6c25ef | []
| no_license | AngryBird3/gotta_code | b0ab47e846b424107dbd3b03e0c0f3afbd239c60 | b9975fef5fa4843bf95d067bea6d064723484289 | refs/heads/master | 2021-01-20T16:47:35.098125 | 2018-03-24T21:31:01 | 2018-03-24T21:31:01 | 53,180,336 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | class Solution(object):
def strStr(self, haystack, needle):
"""
:type haystack: str
:type needle: str
:rtype: int
"""
if not haystack and not needle:
return 0
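        # Slide a window of len(needle) over haystack; the first position that matches character by character wins.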
for i in range(len(haystack) - len(needle) + 1):
match = True
for j in range(len(needle)):
if haystack[i+j] != needle[j]:
match = False
break
if match:
return i
return -1
| [
"[email protected]"
]
| |
a7e448139f2bd614be72df1a7ece9dde49e3be0f | 2a7e44adc8744c55a25e3cafcc2fa19a1607e697 | /settings_inspector/management/commands/inspect_settings.py | 6f7437af5ca147fdd91a75feddb2467cdbec5bf7 | [
"MIT"
]
| permissive | fcurella/django-settings_inspector | 45529288dc8dde264383739c55abe6a9d2077ded | 69a6295de865f540d024e79aab4d211ce3c1d847 | refs/heads/master | 2020-06-04T01:57:17.216783 | 2012-01-05T19:05:12 | 2012-01-05T19:05:12 | 2,989,324 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | from settings_inspector.parser import Setting
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
def handle(self, *args, **options):
root_setting = Setting('django.conf')
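        # Debugging hook left in the command: drops into the ipdb debugger so the parsed settings tree can be inspected by hand.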
import ipdb; ipdb.set_trace()
| [
"[email protected]"
]
| |
543d885088808f62594479bc296f3be23612b33d | 42fdf741bf64ea2e63d1546bb08356286f994505 | /test_20160921_macroblk_generation/rasp30_vmm_gen7.py | 5a540d9b8d4bc1419081b28fb7ceb4a4349b1771 | []
| no_license | skim819/RASP_Workspace_sihwan | 7e3cd403dc3965b8306ec203007490e3ea911e3b | 0799e146586595577c8efa05c647b8cb92b962f4 | refs/heads/master | 2020-12-24T05:22:25.775823 | 2017-04-01T22:15:18 | 2017-04-01T22:15:18 | 41,511,563 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | self.dev_types =['fgota']*2 + ['ota_buf']*1 + ['ota']*1 + ['ota_vmm']*1 + ['cap']*4+ ['nfet']*2 + ['pfet']*2 + ['tgate']*4 + ['nmirror_vmm']*2+['ladder_blk']*1+ ['c4_blk']*1+ ['speech']*1+ ['INFneuron']*1+ ['lpf']*1+['nfet_i2v']*1+['pfet_i2v']*1+['nmirror_w_bias']*1+['fgswc_nmirror_w_bias']*1+['i2v_pfet_gatefgota']*1+['mismatch_meas']*1+['peak_detector']*1+['ramp_fe']*1+['sigma_delta_fe']*1+['vmm_senseamp1']*1+['vmm_senseamp2']*1+['wta']*1+['wta_primary']*1+['common_source']*1+['gnd_out']*1+['vdd_out']*1+['in2in_x1']*1+['in2in_x6']*1+['volt_div']*1+['volt_div_fgota']*1+['integrator']*1+['integrator_nmirror']*1+['fgswitch']*1+['tgate_so']*1+['vmm4x4_SR']*1+['vmm8x4_SR']*1+['SR4']*1+['vmm4x4_SR2']*1+['vmm4x4']*1+['sftreg']*1+['DAC_sftreg']*1 +['sftreg2']*1+['sftreg3']*1+['sftreg4']*1+['mmap_local_swc']*1+['th_logic']*1+['vmm8x4']*1+['vmm8inx8in']*1+['vmm8x4_in']*1+['vmm12x1']*1+['fg_io']*1+['ladder_filter']*1+['vmm12x1_wowta']*1+['TIA_blk']*1+['Adaptive_receptor']*1+['testtemp']*1
| [
"ubuntu@ubuntu-VirtualBox.(none)"
]
| ubuntu@ubuntu-VirtualBox.(none) |
c8e02b9b5c879e0b86c644cc5d67238be6fee662 | 176497ba1cea7233f249a5f439a65f7c472b267f | /06_blog_detail/01_fix_capturing_path_components/portfolio/urls.py | 4a04f504ac61f76660e661e918f5599fa68f8d02 | []
| no_license | CodingNomads/django-web-dev | 79a3a94707489ca0d5f0bf49193b7ffdf6270f4a | e03b8ed130f100afb0296c0d76a84206fbbf789d | refs/heads/master | 2023-05-02T05:12:21.427462 | 2022-11-06T17:56:14 | 2022-11-06T17:56:14 | 235,174,521 | 1 | 7 | null | 2023-04-21T20:54:10 | 2020-01-20T18:53:31 | Python | UTF-8 | Python | false | false | 809 | py | """portfolio URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('projects/', include('projects.urls')),
]
| [
"[email protected]"
]
| |
a04179ec631fa9ee2c77775b4b950d00ead1cff3 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/machinelearning/azure-mgmt-machinelearningservices/generated_samples/registry/code_container/get.py | c8b1ce0c2a7ca85de612a46ed698cd5daf7180dc | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,674 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.machinelearningservices import MachineLearningServicesMgmtClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-machinelearningservices
# USAGE
python get.py
Before running the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = MachineLearningServicesMgmtClient(
credential=DefaultAzureCredential(),
subscription_id="00000000-1111-2222-3333-444444444444",
)
response = client.registry_code_containers.get(
resource_group_name="testrg123",
registry_name="testregistry",
code_name="testContainer",
)
print(response)
# x-ms-original-file: specification/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/stable/2023-04-01/examples/Registry/CodeContainer/get.json
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
c1d6cc614de179239cd85b1aff00551fe5a70de7 | 9130bdbd90b7a70ac4ae491ddd0d6564c1c733e0 | /venv/lib/python3.8/site-packages/numpy/lib/tests/test_function_base.py | 26214a10c602d958990ec1631e13077862370069 | []
| no_license | baruwaa12/Projects | 6ca92561fb440c63eb48c9d1114b3fc8fa43f593 | 0d9a7b833f24729095308332b28c1cde63e9414d | refs/heads/main | 2022-10-21T14:13:47.551218 | 2022-10-09T11:03:49 | 2022-10-09T11:03:49 | 160,078,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | /home/runner/.cache/pip/pool/da/24/8d/ec9cb0de3f3cfb086257743551eecf0a43e5ea4e63881af9e8d6632865 | [
"[email protected]"
]
| |
da080bc3ffe0ad4f0d4461acf3bf439970b3713b | d706f83450d32256e568ea2e279649b9d85ddb94 | /accounts/views.py | 8cd59810b95abf689b8f6bdf3151729484d2fb7d | []
| no_license | celord/advacneddjango | 146d3d4ae351803b37e8599225b38b948e42a8b7 | 044d172fb10556cdeede6888dcec5f466097754d | refs/heads/main | 2023-08-18T19:26:07.230821 | 2021-09-26T17:58:45 | 2021-09-26T17:58:45 | 406,921,992 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | # accounts/views.py
from django.urls import reverse_lazy
from django.views import generic
from .forms import CustomUserCreationForm
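# Generic CreateView wired to the custom user creation form; on success the user is redirected to the login page.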
class SignupPageView(generic.CreateView):
form_class = CustomUserCreationForm
success_url = reverse_lazy('login')
template_name = 'registration/signup.html'
| [
"[email protected]"
]
| |
db62acc5b5c6704db566b47448faeaed2132e6ba | bb64d7194d9f7e8ef6fc2dbfdbc0569713d1079c | /FocalLoss.py | 74a05c5aa62338c5c30e91a1981482671095182f | []
| no_license | scott-mao/Top-Related-Meta-Learning-Method-for-Few-Shot-Detection | 471e7d6e71255333d9b4c929023d7e43ef19fdd2 | 49bfd702f41deaec60fa95314436f69b4e217e6f | refs/heads/main | 2023-04-11T13:00:13.358560 | 2021-04-27T02:24:23 | 2021-04-27T02:24:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,767 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# --------------------------------------------------------
# Licensed under The MIT License [see LICENSE for details]
# Written by Chao CHEN ([email protected])
# Created On: 2017-08-11
# --------------------------------------------------------
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class FocalLoss(nn.Module):
r"""
    This criterion is an implementation of Focal Loss, as proposed in
Focal Loss for Dense Object Detection.
Loss(x, class) = - \alpha (1-softmax(x)[class])^gamma \log(softmax(x)[class])
The losses are averaged across observations for each minibatch.
Args:
alpha(1D Tensor, Variable) : the scalar factor for this criterion
gamma(float, double) : gamma > 0; reduces the relative loss for well-classified examples (p > .5),
putting more focus on hard, misclassified examples
        size_average(bool): By default, the losses are averaged over observations for each minibatch.
However, if the field size_average is set to False, the losses are
instead summed for each minibatch.
"""
def __init__(self, class_num, alpha=None, gamma=2, size_average=False):
super(FocalLoss, self).__init__()
if alpha is None:
self.alpha = Variable(torch.Tensor([[0.25]]*class_num))
else:
if isinstance(alpha, Variable):
self.alpha = alpha
else:
self.alpha = Variable(alpha)
self.gamma = gamma
self.class_num = class_num
self.size_average = size_average
def forward(self, inputs, targets):
N = inputs.size(0)
#print(N)
C = inputs.size(1)
P = F.softmax(inputs,dim=1)
#class_mask = inputs.data.new(N, C).fill_(0)
#class_mask = Variable(class_mask)
ids = targets.unsqueeze(-1)
#class_mask.scatter_(1, ids.data, 1.)
#class_mask = Variable(class_mask)
#print(class_mask)
class_mask=Variable(torch.zeros(N,C).scatter_(1,ids,1.0).cuda())
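        # class_mask is now a one-hot encoding of the targets, used below to pick out p_t = P[target] for each sample.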
if inputs.is_cuda and not self.alpha.is_cuda:
self.alpha = self.alpha.cuda()
#print(self.alpha,Variable(ids).data.view(-1))
alpha = self.alpha[ids.squeeze(-1).cuda()]
probs = (P*class_mask).sum(1).view(-1,1)
log_p = probs.log()
#print('probs size= {}'.format(probs.size()))
#print(probs)
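        # Focal loss: scale the usual -log(p_t) by alpha * (1 - p_t)**gamma so easy, well-classified examples are down-weighted.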
batch_loss = -alpha*(torch.pow((1-probs), self.gamma))*log_p
#print('-----bacth_loss------')
#print(batch_loss)
if self.size_average:
loss = batch_loss.mean()
else:
loss = batch_loss.sum()
return loss
if __name__ == "__main__":
alpha = torch.rand(21, 1)
print(alpha)
    FL = FocalLoss(class_num=5, gamma=0)
CE = nn.CrossEntropyLoss()
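    # With gamma=0 the (1 - p)**gamma factor vanishes, so focal loss should track cross-entropy up to the alpha weighting and the sum-vs-mean reduction.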
N = 4
C = 5
inputs = torch.rand(N, C)
targets = torch.LongTensor(N).random_(C)
inputs_fl = Variable(inputs.clone(), requires_grad=True)
targets_fl = Variable(targets.clone())
inputs_ce = Variable(inputs.clone(), requires_grad=True)
targets_ce = Variable(targets.clone())
print('----inputs----')
print(inputs)
print('---target-----')
print(targets)
fl_loss = FL(inputs_fl, targets_fl)
ce_loss = CE(inputs_ce, targets_ce)
print('ce = {}, fl ={}'.format(ce_loss.data[0], fl_loss.data[0]))
fl_loss.backward()
ce_loss.backward()
#print(inputs_fl.grad.data)
print(inputs_ce.grad.data)
| [
"[email protected]"
]
| |
e371120a4587f7edeed803eaedf3fa2de529f2e3 | 26ac73a3295abcd41d6124e05a62a775dc4111e9 | /src/ccl_malaria/logregs_fit.py | 047bdf03f160b3523ea921a3ac68ca6a19e38dc2 | []
| no_license | sdvillal/ccl-malaria | 78ed74740076981a51a301c2b6f2747eb18526dd | a28f7ef8f172c1374f5c079fdab8366333b2d56b | refs/heads/master | 2021-01-13T01:55:22.486971 | 2018-02-05T18:10:42 | 2018-02-05T18:10:42 | 17,605,429 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,959 | py | # coding=utf-8
"""Experiments with Morgan fingerprints and logistic regression (sklearn and vowpal wabbit)."""
from __future__ import print_function, division
from collections import OrderedDict
from copy import copy
import hashlib
from itertools import product
import os.path as op
import json
from time import time
import argh
import h5py
import joblib
from sklearn.base import clone
from sklearn.linear_model.logistic import LogisticRegression
from sklearn.metrics import roc_auc_score
import numpy as np
from ccl_malaria import MALARIA_EXPS_ROOT, info
from minioscail.common.eval import cv_splits, enrichment_at
from ccl_malaria.features import MurmurFolder, MalariaFingerprintsExampleSet
from ccl_malaria.results import predict_malaria_unlabelled, save_molids
from minioscail.common.config import mlexp_info_helper
from minioscail.common.misc import ensure_dir
MALARIA_LOGREGS_EXPERIMENT_ROOT = op.join(MALARIA_EXPS_ROOT, 'logregs')
#######################################
# The data version we will work with
#######################################
def malaria_logreg_fpt_providers(folder):
"""Returns a tuple (rf_lab, rf_amb, rf_unl, rf_scr) with the example-sets used in the logreg experiments."""
rf_lab = MalariaFingerprintsExampleSet(dset='lab', remove_ambiguous=True, zero_dupes='all', folder=folder)
rf_unl = MalariaFingerprintsExampleSet(dset='unl', remove_ambiguous=False, zero_dupes='all', folder=folder)
rf_amb = MalariaFingerprintsExampleSet(dset='amb', zero_dupes='all', folder=folder)
rf_scr = MalariaFingerprintsExampleSet(dset='scr',
remove_ambiguous=False,
zero_dupes=None, # N.B. dupes do not matter with logreg,
# faster not to do it (at least when streaming in my tests)
folder=folder)
return rf_lab, rf_amb, rf_unl, rf_scr
#######################################
# FIT the logistic regression models
#######################################
@argh.arg('--cv-seeds', nargs='+', type=int)
def fit(dest_dir=MALARIA_LOGREGS_EXPERIMENT_ROOT,
# Logreg params
penalty='l1',
C=1.0,
class_weight_auto=False,
dual=False,
tol=1e-4,
fit_intercept=True,
intercept_scaling=1,
# CV params
num_cv_folds=10,
cv_seeds=(0,),
save_unlabelled_predictions=False,
save_fold_model=False,
min_fold_auc=0.88,
# Fingerprint folding params
fingerprint_folder_seed=0,
fingerprint_fold_size=1023,
# Computational requirements params
force=False,
chunksize=1000000,
max_logreg_tol=1E-5):
"""Logistic regression experiment using the liblinear wrapper in sklearn.
Generates cross-val results
"""
if max_logreg_tol is not None and tol < max_logreg_tol:
info('Ignoring long intolerant experiments')
return
info('Malaria logregs experiment')
# Command line type inference is rotten...
C = float(C)
tol = float(tol)
intercept_scaling = float(intercept_scaling)
num_cv_folds = int(num_cv_folds)
min_fold_auc = float(min_fold_auc)
fingerprint_folder_seed = int(fingerprint_folder_seed)
fingerprint_fold_size = int(fingerprint_fold_size)
chunksize = int(chunksize)
# Example providers
folder = None if fingerprint_fold_size < 1 else MurmurFolder(seed=fingerprint_folder_seed,
fold_size=fingerprint_fold_size)
rf_lab, rf_amb, rf_unl, rf_scr = malaria_logreg_fpt_providers(folder)
info('Data description: %s' % rf_lab.configuration().id(nonids_too=True))
# Experiment context: data
data_id = rf_lab.configuration().id(nonids_too=True)
data_dir = op.join(dest_dir, data_id)
ensure_dir(data_dir)
for cv_seed in cv_seeds:
# Command line type inference is rotten...
cv_seed = int(cv_seed)
# Deterministic randomness
my_rng = np.random.RandomState(seed=cv_seed)
# Experiment context: model
logreg_params = OrderedDict((
('penalty', penalty),
('C', C),
('class_weight', 'auto' if class_weight_auto else None),
('dual', dual),
('tol', tol),
('fit_intercept', fit_intercept),
('intercept_scaling', intercept_scaling),
('random_state', my_rng.randint(low=0, high=4294967294)),
# Changed, from original 1000**4, to make liblinear happy
))
model_setup = LogisticRegression(**logreg_params)
model_id = 'skllogreg__%s' % '__'.join(['%s=%s' % (k, str(v)) for k, v in logreg_params.items()])
model_dir = op.join(data_dir, model_id)
ensure_dir(model_dir)
info('Model: %s' % model_id)
# Experiment context: eval
eval_id = 'cv__cv_seed=%d__num_folds=%d' % (cv_seed, num_cv_folds)
eval_dir = op.join(model_dir, eval_id)
ensure_dir(eval_dir)
info('Eval: %d-fold cross validation (seed=%d)' % (num_cv_folds, cv_seed))
# Already done?
info_file = op.join(eval_dir, 'info.json')
if op.isfile(info_file) and not force:
info('\tAlready done, skipping...')
return # Oh well, a lot have been done up to here... rework somehow
# Anytime we see this file, we know we need to stop
stop_computing_file = op.join(eval_dir, 'STOP_BAD_FOLD')
# --- Time to work!
# Save model config
joblib.dump(model_setup, op.join(model_dir, 'model_setup.pkl'), compress=3)
# Read labelled data in
info('Reading data...')
X, y = rf_lab.Xy()
info('ne=%d; nf=%d' % rf_lab.X().shape)
# Save molids... a bit too ad-hoc...
save_molids(data_dir, 'lab', rf_lab.ids())
if save_unlabelled_predictions:
save_molids(data_dir, 'unl', rf_unl.ids())
save_molids(data_dir, 'scr', rf_scr.ids())
save_molids(data_dir, 'amb', rf_amb.ids())
# Save folding information.
# By now, all the folds have already been computed:
# - because we cached X
# - and in this case we are warranted that no new unfolded features will appear at test time
if folder is not None:
info('Saving the map folded_features -> unfolded_feature...')
folded2unfolded_file = op.join(data_dir, 'folded2unfolded.h5')
if not op.isfile(folded2unfolded_file):
with h5py.File(folded2unfolded_file) as h5:
h5['f2u'] = folder.folded2unfolded()
folder_light_file = op.join(data_dir, 'folder.pkl')
if not op.isfile(folder_light_file):
folder_light = copy(folder) # Shallow copy
folder_light.clear_cache()
joblib.dump(folder_light, folder_light_file, compress=3)
# Cross-val splitter
cver = cv_splits(num_points=len(y),
Y=y,
num_folds=num_cv_folds,
rng=my_rng,
stratify=True)
# Fit and classify
for cv_fold_num in range(num_cv_folds):
fold_info_file = op.join(eval_dir, 'fold=%d__info.json' % cv_fold_num)
if op.isfile(fold_info_file):
info('Fold %d already done, skipping' % cv_fold_num)
continue
if op.isfile(stop_computing_file):
info('Bad fold detected, no more computations required')
break
# Split into train/test
train_i, test_i = cver(cv_fold_num)
Xtrain, ytrain = X[train_i, :], y[train_i]
Xtest, ytest = X[test_i, :], y[test_i]
assert len(set(train_i) & set(test_i)) == 0
# Copy the model...
model = clone(model_setup)
start = time()
info('Training...')
model.fit(Xtrain, ytrain)
train_time = time() - start
info('Model fitting has taken %.2f seconds' % train_time)
if save_fold_model:
info('Saving trained model')
joblib.dump(model, op.join(eval_dir, 'fold=%d__fitmodel.pkl' % cv_fold_num), compress=3)
info('Predicting and saving results...')
with h5py.File(op.join(eval_dir, 'fold=%d__scores.h5' % cv_fold_num), 'w') as h5:
start = time()
# Test indices
h5['test_indices'] = test_i
# Model
h5['logreg_coef'] = model.coef_
h5['logreg_intercept'] = model.intercept_
# Test examples
info('Scoring test...')
scores_test = model.predict_proba(Xtest)
fold_auc = roc_auc_score(ytest, scores_test[:, 1])
fold_enrichment5 = enrichment_at(ytest, scores_test[:, 1], percentage=0.05)
info('Fold %d ROCAUC: %.3f' % (cv_fold_num, fold_auc))
info('Fold %d Enrichment at 5%%: %.3f' % (cv_fold_num, fold_enrichment5))
h5['test'] = scores_test.astype(np.float32)
if save_unlabelled_predictions:
predict_malaria_unlabelled(model,
h5,
rf_amb=rf_amb,
rf_scr=rf_scr,
rf_unl=rf_unl,
chunksize=chunksize)
test_time = time() - start
info('Predicting has taken %.2f seconds' % test_time)
# Finally save meta-information for the fold
metainfo = mlexp_info_helper(
title='malaria-trees-oob',
data_setup=data_id,
model_setup=model_id,
exp_function=fit,
)
metainfo.update((
('train_time', train_time),
('test_time', test_time),
('auc', fold_auc),
('enrichment5', fold_enrichment5),
))
with open(fold_info_file, 'w') as writer:
json.dump(metainfo, writer, indent=2, sort_keys=False)
# One last thing, should we stop now?
if fold_auc < min_fold_auc:
stop_message = 'The fold %d was bad (auc %.3f < %.3f), skipping the rest of the folds' % \
(cv_fold_num, fold_auc, min_fold_auc)
info(stop_message)
with open(stop_computing_file, 'w') as writer:
writer.write(stop_message)
# Summarize cross-val in the info file
metainfo = mlexp_info_helper(
title='malaria-logregs-cv',
data_setup=data_id,
model_setup=model_id,
exp_function=fit,
)
metainfo.update((
('num_cv_folds', num_cv_folds),
('cv_seed', cv_seed),
))
metainfo.update(logreg_params.items())
with open(info_file, 'w') as writer:
json.dump(metainfo, writer, indent=2, sort_keys=False)
#######################################
# Generate command lines for many logreg experiments
#######################################
def sha_for_cl(cl):
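    # Identify an experiment by hashing only the parameter portion of its command line (the text between 'fit-logregs ' and ' &>').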
params = cl.partition('fit-logregs ')[2].partition(' &>')[0]
return hashlib.sha256(params).hexdigest()
def cl(with_time=False):
"""Generate command lines for different experiments."""
all_commands = []
def gen_cl(num_foldss=(10,),
cv_seedsss=((0, 1, 2, 3, 4),), # FIXME: do not use a special case, it breaks parameters shas
penalties=('l1', 'l2'),
Cs=(0.001, 0.01, 0.1, 0.5, 1, 5, 10, 100, 1000),
class_autos=(True, False),
tols=(1E-4,),
duals=(False,),
fingerprint_fold_sizes=(0, 255, 511, 1023, 2047, 4095, 8191, 16383, 32767, 65537, 131073),
fingerprint_folder_seeds=(0, 1)):
"""
Generates command lines for the logistic regression tasks.
        The default params are those used for the "not so crazy" experiment (stopped on Sunday morning).
"""
for i, (num_folds, cv_seeds, penalty, C, class_auto, tol, dual, ff_size, ff_seed) in \
enumerate(product(num_foldss, cv_seedsss, penalties, Cs, class_autos, tols, duals,
fingerprint_fold_sizes, fingerprint_folder_seeds)):
params = (
'--num-cv-folds %d' % num_folds,
'--cv-seeds %s' % ' '.join(map(str, cv_seeds)),
'--penalty %s' % penalty,
'--C %g' % C,
'--class-weight-auto' if class_auto else None,
'--tol %g' % tol,
'--dual' if dual else None,
'--fingerprint-fold-size %d' % ff_size,
'--fingerprint-folder-seed %d' % ff_seed
)
params = ' '.join(filter(lambda x: x is not None, params))
cl = 'PYTHONUNBUFFERED=1 '
if with_time:
cl += '/usr/bin/time -v '
cl += 'ccl-malaria logregs fit '
cl += params
cl += ' &>~/logreg-%s.log' % hashlib.sha256(params).hexdigest()
all_commands.append(cl)
#########################
#
# There are three basic tasks we want to do:
#
# 1- Logreg param selection:
    #    For this we would only need 1 cv seed and 1 fpt seed, as the results are clearly consistent
    #    across folds. Probably no need to do together with fp exploration (can reduce the number
# of fold sizes greatly). We would like to explore also at least tolerances and duals.
# We might want to use less number of folds (e.g. just 5 --> from 90% to 75% train size).
#
# 2- Fingerprint strategy exploration:
# We would just stick with what is done in the previous. An alternative that would be faster
    #    We would just stick with what was done in the previous task. An alternative that would be faster
# plan to do) and then apply the best logreg parameters to this phase. We could miss
# interactions but, oh well, that is life. This second faster way is what Flo planned.
#
    # 3- Final model exploitation and interpretation:
# For this we would need (a) unfolded feature vectors only (b) maybe more cvseeds (c) maybe boosting.
# This phase only depends on phase 1 and it is what we need to generate the predictions and interpretations.
# We could stick with Flo's insights and just use a few big Cs, l1 and class weights.
#
#########################
#
# From sklearn implementation:
# dual : boolean
# Dual or primal formulation. Dual formulation is only implemented for l2 penalty.
# Prefer dual=False when n_samples > n_features.
# So we should use dual when not using folding and regularizing via l2.
#
#########################
# Task 1: logreg parameter selection
gen_cl(num_foldss=(10,),
cv_seedsss=((0,),),
penalties=('l1', 'l2'),
Cs=(0.001, 0.01, 0.1, 0.5, 1, 5, 10, 100, 1000),
class_autos=(True, False),
tols=(1E-2, 1E-4), # 1E-6 Takes really long
duals=(False,),
fingerprint_fold_sizes=(0, 1023, 2047, 4095, 8191, 16383,),
fingerprint_folder_seeds=(0,))
# Task 2: fingerprint strategy exploration
gen_cl(num_foldss=(10,),
cv_seedsss=((0,),),
penalties=('l1',),
Cs=(1,),
class_autos=(True,),
tols=(1E-4,),
duals=(False,),
fingerprint_fold_sizes=(0, 255, 511, 1023, 2047, 4095, 8191, 16383, 32767, 65537, 131073),
fingerprint_folder_seeds=(0, 1, 2, 3))
# Task 3: deployment classifiers computation - only one long job...
gen_cl(num_foldss=(3, 5, 7, 10,),
cv_seedsss=((0,), (1,), (2,), (3,), (4,)),
penalties=('l1', 'l2',),
Cs=(1, 5,),
class_autos=(True,),
tols=(1E-4,),
duals=(False,),
fingerprint_fold_sizes=(0,),
fingerprint_folder_seeds=(0,))
# ---- Save the cls to files
all_commands = list(set(all_commands)) # Remove duplicates
# Proper balance of workloads between machines
destinies = (
('galileo', [], 0.30196078), # machine, cls, probability to be picked
('zeus', [], 0.25882353),
('str22', [], 0.18431373),
('strz', [], 0.25490196),
)
p_choice = [p for _, _, p in destinies]
rng = np.random.RandomState(2147483647)
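    # Fixed seed so the weighted random assignment of command lines to machines is reproducible across runs.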
for cl in all_commands:
_, your_destiny, _ = destinies[rng.choice(len(destinies), p=p_choice)]
your_destiny.append(cl)
# Save the selections
for name, cls, _ in destinies:
with open(op.join(op.dirname(__file__), '..', name), 'w') as writer:
writer.write('\n'.join(cls))
# Summary
total_cls = sum(len(cl) for _, cl, _ in destinies)
print('Total number of commands: %d' % total_cls)
for name, cls, p in destinies:
print('\t%s\t%d %g %g' % (name.ljust(30), len(cls), p, len(cls) / (total_cls + 1.)))
if __name__ == '__main__':
parser = argh.ArghParser()
parser.add_commands([cl, fit])
parser.dispatch()
# TODO: bring back from oscail configurable to model (urgent!) and eval (unnecessary, but good for consistency)
# TODO: use SGDClassifier to be able to use elastic net
# TODO: vowpal wabbit back to scene - it was the original idea for the tutorial!
| [
"[email protected]"
]
| |
135a875898921530dc0d9ed13b5bd02d13a96cbc | ee2af8c0fdc65f44ed9a4295806d75fb09257b58 | /saif/google_api_integreation/__manifest__.py | db20488330aedd7f71c7ecfb68f2ce9990548508 | []
| no_license | sc4you/odoo-project | 02b81ff4920a69d3e79c5dcc605a794779c5a77c | 2ef439ef54f1165c3569a1047cd5cb6a0b50572e | refs/heads/master | 2020-03-21T13:34:52.555402 | 2018-03-19T12:26:39 | 2018-03-19T12:26:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 452 | py | # -*- coding: utf-8 -*-
{
'name': 'Google Docs Integration',
'category': 'Extra Tools',
'summary': 'Spreadsheet, Document, Presentation',
    'description': 'Google Docs Integration: This module lets you develop, '\
                   'read and modify Spreadsheets, Documents and Presentations',
'author': 'Muhammad Awais',
'depends':['base','project','sale'],
'application': True,
'data': ['views/template.xml','security/security.xml','security/ir.model.access.csv'],
} | [
"[email protected]"
]
| |
6c0e4ea1a74613b04f657d103905ed557e74cd28 | e262e64415335060868e9f7f73ab8701e3be2f7b | /.history/Test002/数据类型_20201205183212.py | 0498acedcb8af7250b1f43f9d0736323a1f60b37 | []
| no_license | Allison001/developer_test | 6e211f1e2bd4287ee26fd2b33baf1c6a8d80fc63 | b8e04b4b248b0c10a35e93128a5323165990052c | refs/heads/master | 2023-06-18T08:46:40.202383 | 2021-07-23T03:31:54 | 2021-07-23T03:31:54 | 322,807,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,787 | py | # fruits = ['orange', 'apple', 'pear', 'banana', 'kiwi', 'apple', 'banana']
# print(fruits.count("apple"))
# a = fruits.index("banana",4)
# print(a)
# fruits.reverse()
# print(fruits)
# fruits.append("daka")
# print(fruits)
# print(fruits.sort)
# a = fruits.pop(0)
# print(a)
# print(fruits)
# number = [1,2,45,3,7,24,3]
# print(number.sort(reverse=True))
# from collections import deque
# queue = deque(["Eric", "John", "Michael"])
# queue.append("Terry")
# queue.append("Graham")
# a= queue.popleft()
# print(a)
# b = queue.popleft()
# print(b)
# print(queue)
# number = [1,2,3,4]
# number.append(5)
# number.append(6)
# print(number)
# number.pop()
# number.pop()
# print(number)
# lista = []
# for i in range(1,10):
# lista.append(i**2)
# print(lista)
# number = list(map(lambda x: x**2, range(1,10)))
# print(number)
# number = [i**2 for i in range(1,10)]
# print(number)
# number1= [(x,y) for x in [1,2,3] for y in [3,1,4] if x != y]
# print(number1)
# lis2 = []
# for x in [1,2,3]:
# for y in [3,1,4]:
# if x != y:
# lis2.append(x,y)
# print(number1)
# ver = [1,2,3]
# lista = [i**2 for i in ver]
# print(lista)
# ver1 = [-1,-2,3,4,-5]
# list2 = [i**2 for i in ver1 if i>0]
# print(list2)
# list3 = [abs(i) for i in ver1]
# print(list3)
# freshfruit = [' banana', ' loganberry ', 'passion fruit ']
# ab = [i.strip() for i in freshfruit]
# print(ab)
# list4 =[(x,x**2) for x in range(10)]
# print(list4)
# ver =[[1,2,3],[4,5,6],[7,8,9]]
# list5 = [y for i in ver for y in i]
# print(list5)
# from math import pi
# pia = 1.1323123
# for i in range(6):
# print(round(pia,i))
# list6 = [round(pia,i) for i in range(6)]
# print(list6)
#交换行和列
row_col = [
[1,4,7],
[2,5,6],
[3,6,9]
] | [
"[email protected]"
]
| |
35a79b4fdf4e0ea3e84ae6fc05370aca40fda015 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/cloud/eventarc/v1/eventarc-v1-py/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py | f6847c60fe490f76ea1ee075399c4f7fea0ff76a | [
"Apache-2.0"
]
| permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,587 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.eventarc_v1.types import eventarc
from google.cloud.eventarc_v1.types import trigger
from google.longrunning import operations_pb2 # type: ignore
from .base import EventarcTransport, DEFAULT_CLIENT_INFO
class EventarcGrpcTransport(EventarcTransport):
"""gRPC backend transport for Eventarc.
Eventarc allows users to subscribe to various events that are
provided by Google Cloud services and forward them to supported
destinations.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(self, *,
host: str = 'eventarc.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(cls,
host: str = 'eventarc.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def get_trigger(self) -> Callable[
[eventarc.GetTriggerRequest],
trigger.Trigger]:
r"""Return a callable for the get trigger method over gRPC.
Get a single trigger.
Returns:
Callable[[~.GetTriggerRequest],
~.Trigger]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_trigger' not in self._stubs:
self._stubs['get_trigger'] = self.grpc_channel.unary_unary(
'/google.cloud.eventarc.v1.Eventarc/GetTrigger',
request_serializer=eventarc.GetTriggerRequest.serialize,
response_deserializer=trigger.Trigger.deserialize,
)
return self._stubs['get_trigger']
@property
def list_triggers(self) -> Callable[
[eventarc.ListTriggersRequest],
eventarc.ListTriggersResponse]:
r"""Return a callable for the list triggers method over gRPC.
List triggers.
Returns:
Callable[[~.ListTriggersRequest],
~.ListTriggersResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_triggers' not in self._stubs:
self._stubs['list_triggers'] = self.grpc_channel.unary_unary(
'/google.cloud.eventarc.v1.Eventarc/ListTriggers',
request_serializer=eventarc.ListTriggersRequest.serialize,
response_deserializer=eventarc.ListTriggersResponse.deserialize,
)
return self._stubs['list_triggers']
@property
def create_trigger(self) -> Callable[
[eventarc.CreateTriggerRequest],
operations_pb2.Operation]:
r"""Return a callable for the create trigger method over gRPC.
Create a new trigger in a particular project and
location.
Returns:
Callable[[~.CreateTriggerRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_trigger' not in self._stubs:
self._stubs['create_trigger'] = self.grpc_channel.unary_unary(
'/google.cloud.eventarc.v1.Eventarc/CreateTrigger',
request_serializer=eventarc.CreateTriggerRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['create_trigger']
@property
def update_trigger(self) -> Callable[
[eventarc.UpdateTriggerRequest],
operations_pb2.Operation]:
r"""Return a callable for the update trigger method over gRPC.
Update a single trigger.
Returns:
Callable[[~.UpdateTriggerRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_trigger' not in self._stubs:
self._stubs['update_trigger'] = self.grpc_channel.unary_unary(
'/google.cloud.eventarc.v1.Eventarc/UpdateTrigger',
request_serializer=eventarc.UpdateTriggerRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['update_trigger']
@property
def delete_trigger(self) -> Callable[
[eventarc.DeleteTriggerRequest],
operations_pb2.Operation]:
r"""Return a callable for the delete trigger method over gRPC.
Delete a single trigger.
Returns:
Callable[[~.DeleteTriggerRequest],
~.Operation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_trigger' not in self._stubs:
self._stubs['delete_trigger'] = self.grpc_channel.unary_unary(
'/google.cloud.eventarc.v1.Eventarc/DeleteTrigger',
request_serializer=eventarc.DeleteTriggerRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['delete_trigger']
__all__ = (
'EventarcGrpcTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
]
| bazel-bot-development[bot]@users.noreply.github.com |
1b8bf4767e7e81816c259accadb336a80b752300 | 1a13cf55de87bf9fd7cd5911ab7bd9d9c1f88241 | /tests/programscache.py | 2382802032fded66be7aa14c186b0adc155ddd14 | [
"Apache-2.0"
]
| permissive | jeperez/winreg-kb | c59ead2d593b4ec375b77d7a9c49fbec35b9f156 | a50fcfc89e3fac282f276b12fb67807ddb56ef10 | refs/heads/master | 2021-01-19T11:34:33.293074 | 2017-04-03T06:28:00 | 2017-04-03T06:28:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,086 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the Programs Cache information collector."""
import unittest
from dfwinreg import registry as dfwinreg_registry
from winregrc import collector
from winregrc import output_writer
from winregrc import programscache
from tests import test_lib as shared_test_lib
class TestOutputWriter(output_writer.StdoutOutputWriter):
"""Class that defines a test output writer.
Attributes:
      text (list[str]): lines of text captured by WriteText.
"""
def __init__(self):
"""Initializes an output writer object."""
super(TestOutputWriter, self).__init__()
self.text = []
def WriteText(self, text):
"""Writes text to stdout.
Args:
text: the text to write.
"""
self.text.append(text)
class ProgramsCacheDataParserTest(shared_test_lib.BaseTestCase):
"""Tests for the Programs Cache data parser."""
# TODO: add tests.
class ProgramsCacheCollectorTest(shared_test_lib.BaseTestCase):
"""Tests for the Programs Cache information collector."""
@shared_test_lib.skipUnlessHasTestFile([u'NTUSER.DAT'])
def testCollect(self):
"""Tests the Collect function."""
registry_collector = collector.WindowsRegistryCollector()
test_path = self._GetTestFilePath([u'NTUSER.DAT'])
registry_collector.ScanForWindowsVolume(test_path)
self.assertIsNotNone(registry_collector.registry)
collector_object = programscache.ProgramsCacheCollector()
test_output_writer = TestOutputWriter()
collector_object.Collect(registry_collector.registry, test_output_writer)
test_output_writer.Close()
# TODO: fix test.
self.assertEqual(test_output_writer.text, [])
def testCollectEmpty(self):
"""Tests the Collect function on an empty Registry."""
registry = dfwinreg_registry.WinRegistry()
collector_object = programscache.ProgramsCacheCollector()
test_output_writer = TestOutputWriter()
collector_object.Collect(registry, test_output_writer)
test_output_writer.Close()
self.assertEqual(len(test_output_writer.text), 0)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
fa05b68b68103da6eba41cb3eace31abf9f4ba74 | 4a5a39858bab54d9fe06364ecfe8edc2747b87f6 | /Code Jam 2018/Round 1C/ant-stack.py | ddc1e8535674f7ccbaa880e66e31d6b637f53b28 | []
| no_license | gsakkas/code-jam | d85a63c11d13ba405b9df4be1e6739ef5c5394ae | 8e81a4d9b2ea11d9bbb9b3e206951a2261798458 | refs/heads/master | 2021-01-22T19:54:24.732574 | 2018-05-17T12:14:06 | 2018-05-17T12:14:06 | 85,257,349 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 888 | py | def read_int():
return int(raw_input())
def read_many_ints():
return map(int, raw_input().split())
def solve(n):
ws = read_many_ints()
large = 10 ** 100
sums = [[large] * min(139, n) for _ in xrange(n)]
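    # sums[i][j]: minimum total weight of a stack of j+1 ants chosen from the first i+1; a new ant goes on the bottom,
    # so it must carry at most 6x its own weight. 139 caps the useful stack height, since the minimal total weight
    # grows roughly geometrically under that rule.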
for i in xrange(n):
sums[i][0] = ws[i]
# sums = {}
# sums[0] = ws[0]
# for i in xrange(1, n):
# sums[i] = 0
for i in xrange(1, n):
for j in xrange(1, min(139, i + 1)):
if sums[i - 1][j - 1] <= 6 * ws[i]:
sums[i][j] = min(sums[i - 1][j - 1] + ws[i], sums[i - 1][j])
else:
sums[i][j] = sums[i - 1][j]
j = n - 1
while j >= 0 and sums[n - 1][j] == large:
j -= 1
return j + 1
if __name__ == "__main__":
t = read_int()
for test in xrange(1, t + 1):
n = read_int()
print "Case #{}: {}".format(test, solve(n))
exit(0)
| [
"[email protected]"
]
| |
af7343241d25adfa0239fc48d6b1c29e0fd2cfcf | 360ae1188ad79e71ccc72da0b9ae709bda678f91 | /ryu/services/protocols/__init__.py | 340a42305b81a40727ffe472e0a96ccaa638aed4 | [
"Apache-2.0"
]
| permissive | faucetsdn/ryu | 47b3523e7ccb381f3bdf2877a3f9f01cb1876054 | d6cda4f427ff8de82b94c58aa826824a106014c2 | refs/heads/master | 2023-09-05T06:37:21.991029 | 2022-06-09T23:09:40 | 2022-06-09T23:09:40 | 2,945,007 | 385 | 215 | Apache-2.0 | 2022-11-13T10:50:25 | 2011-12-09T03:43:50 | Python | UTF-8 | Python | false | false | 682 | py | # Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013 Isaku Yamahata <yamahata at private email ne jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| [
"[email protected]"
]
| |
b96de974ca34505ea68a7002e1eaca1fdf7e1661 | 076e0ebd618ed406808e9009a70d886e8bdb1bbf | /grafeas/grafeas_v1/__init__.py | 98e1ad1c8c28a4c25705f3c56a2ad03ad7d539b0 | [
"Apache-2.0"
]
| permissive | isabella232/python-grafeas | 8edb1c3b79e51292f1612489775b51a96033049c | a806330d0f344eb0b97e351d7e5ba34b8ae9b740 | refs/heads/master | 2022-12-15T09:53:51.979968 | 2020-09-22T22:15:19 | 2020-09-22T22:15:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,916 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .services.grafeas import GrafeasClient
from .types.attestation import AttestationNote
from .types.attestation import AttestationOccurrence
from .types.build import BuildNote
from .types.build import BuildOccurrence
from .types.common import NoteKind
from .types.common import RelatedUrl
from .types.common import Signature
from .types.cvss import CVSSv3
from .types.deployment import DeploymentNote
from .types.deployment import DeploymentOccurrence
from .types.discovery import DiscoveryNote
from .types.discovery import DiscoveryOccurrence
from .types.grafeas import BatchCreateNotesRequest
from .types.grafeas import BatchCreateNotesResponse
from .types.grafeas import BatchCreateOccurrencesRequest
from .types.grafeas import BatchCreateOccurrencesResponse
from .types.grafeas import CreateNoteRequest
from .types.grafeas import CreateOccurrenceRequest
from .types.grafeas import DeleteNoteRequest
from .types.grafeas import DeleteOccurrenceRequest
from .types.grafeas import GetNoteRequest
from .types.grafeas import GetOccurrenceNoteRequest
from .types.grafeas import GetOccurrenceRequest
from .types.grafeas import ListNoteOccurrencesRequest
from .types.grafeas import ListNoteOccurrencesResponse
from .types.grafeas import ListNotesRequest
from .types.grafeas import ListNotesResponse
from .types.grafeas import ListOccurrencesRequest
from .types.grafeas import ListOccurrencesResponse
from .types.grafeas import Note
from .types.grafeas import Occurrence
from .types.grafeas import UpdateNoteRequest
from .types.grafeas import UpdateOccurrenceRequest
from .types.image import Fingerprint
from .types.image import ImageNote
from .types.image import ImageOccurrence
from .types.image import Layer
from .types.package import Architecture
from .types.package import Distribution
from .types.package import Location
from .types.package import PackageNote
from .types.package import PackageOccurrence
from .types.package import Version
from .types.provenance import AliasContext
from .types.provenance import Artifact
from .types.provenance import BuildProvenance
from .types.provenance import CloudRepoSourceContext
from .types.provenance import Command
from .types.provenance import FileHashes
from .types.provenance import GerritSourceContext
from .types.provenance import GitSourceContext
from .types.provenance import Hash
from .types.provenance import ProjectRepoId
from .types.provenance import RepoId
from .types.provenance import Source
from .types.provenance import SourceContext
from .types.upgrade import UpgradeDistribution
from .types.upgrade import UpgradeNote
from .types.upgrade import UpgradeOccurrence
from .types.upgrade import WindowsUpdate
from .types.vulnerability import Severity
from .types.vulnerability import VulnerabilityNote
from .types.vulnerability import VulnerabilityOccurrence
__all__ = (
"AliasContext",
"Architecture",
"Artifact",
"AttestationNote",
"AttestationOccurrence",
"BatchCreateNotesRequest",
"BatchCreateNotesResponse",
"BatchCreateOccurrencesRequest",
"BatchCreateOccurrencesResponse",
"BuildNote",
"BuildOccurrence",
"BuildProvenance",
"CVSSv3",
"CloudRepoSourceContext",
"Command",
"CreateNoteRequest",
"CreateOccurrenceRequest",
"DeleteNoteRequest",
"DeleteOccurrenceRequest",
"DeploymentNote",
"DeploymentOccurrence",
"DiscoveryNote",
"DiscoveryOccurrence",
"Distribution",
"FileHashes",
"Fingerprint",
"GerritSourceContext",
"GetNoteRequest",
"GetOccurrenceNoteRequest",
"GetOccurrenceRequest",
"GitSourceContext",
"Hash",
"ImageNote",
"ImageOccurrence",
"Layer",
"ListNoteOccurrencesRequest",
"ListNoteOccurrencesResponse",
"ListNotesRequest",
"ListNotesResponse",
"ListOccurrencesRequest",
"ListOccurrencesResponse",
"Location",
"Note",
"NoteKind",
"Occurrence",
"PackageNote",
"PackageOccurrence",
"ProjectRepoId",
"RelatedUrl",
"RepoId",
"Severity",
"Signature",
"Source",
"SourceContext",
"UpdateNoteRequest",
"UpdateOccurrenceRequest",
"UpgradeDistribution",
"UpgradeNote",
"UpgradeOccurrence",
"Version",
"VulnerabilityNote",
"VulnerabilityOccurrence",
"WindowsUpdate",
"GrafeasClient",
)
| [
"[email protected]"
]
| |
a123b05a0d90ab5de3dad6be2814c36888231339 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/8/uzi.py | aa6e8089d0fcbff6ad8c6136fb49730dbf1ce425 | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'uZI':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
]
| |
6592fd4e7614f3aa792b9305227977f7af952754 | 6994917b9d22e9e15e578a0e5c75dcf4ce3cb022 | /perfil/migrations/0025_auto_20200724_2157.py | e5dd79e636927224e5bd7b39a7907b3d99b39094 | []
| no_license | linikerunk/rh-ticket | 59ad6411a3d08c90c2704b37ba9bba67ea7f7754 | bd8edd3eb1ea6cfe04fee03a4f41049a84c1e14a | refs/heads/master | 2023-01-06T21:25:06.851369 | 2020-10-29T20:32:53 | 2020-10-29T20:32:53 | 250,346,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 543 | py | # Generated by Django 2.2.9 on 2020-07-25 00:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('perfil', '0024_auto_20200724_2049'),
]
operations = [
migrations.AlterField(
model_name='funcionario',
name='centro_de_custo_link',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='perfil.CentroDeCusto', verbose_name='Centro de Custo link'),
),
]
| [
"[email protected]"
]
| |
15261973dd39d79408de00f20eb9f216067056de | e56214188faae8ebfb36a463e34fc8324935b3c2 | /test/test_hyperflex_st_platform_cluster_resiliency_info_ref.py | b4d5a1014524481f5a6bfa8835557ef078252d03 | [
"Apache-2.0"
]
| permissive | CiscoUcs/intersight-python | 866d6c63e0cb8c33440771efd93541d679bb1ecc | a92fccb1c8df4332ba1f05a0e784efbb4f2efdc4 | refs/heads/master | 2021-11-07T12:54:41.888973 | 2021-10-25T16:15:50 | 2021-10-25T16:15:50 | 115,440,875 | 25 | 18 | Apache-2.0 | 2020-03-02T16:19:49 | 2017-12-26T17:14:03 | Python | UTF-8 | Python | false | false | 2,123 | py | # coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.hyperflex_st_platform_cluster_resiliency_info_ref import HyperflexStPlatformClusterResiliencyInfoRef # noqa: E501
from intersight.rest import ApiException
class TestHyperflexStPlatformClusterResiliencyInfoRef(unittest.TestCase):
"""HyperflexStPlatformClusterResiliencyInfoRef unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testHyperflexStPlatformClusterResiliencyInfoRef(self):
"""Test HyperflexStPlatformClusterResiliencyInfoRef"""
# FIXME: construct object with mandatory attributes with example values
# model = intersight.models.hyperflex_st_platform_cluster_resiliency_info_ref.HyperflexStPlatformClusterResiliencyInfoRef() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
b2e93044857996da4f7864f49e1ad69a6546cb0b | 7c15f211adc9e9eb9f66ccdd570c9f38dff7ea8d | /packages/autorest.python/test/vanilla/version-tolerant/Expected/AcceptanceTests/HttpVersionTolerant/httpinfrastructureversiontolerant/aio/__init__.py | 154a90083e55b0b0e6ba806098e8782f6d24a683 | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | Azure/autorest.python | cc4bfbf91ae11535731cad37cedd6b733edf1ebd | a00d7aaa3753ef05cb5a0d38c664a90869478d44 | refs/heads/main | 2023-09-03T06:58:44.246200 | 2023-08-31T20:11:51 | 2023-08-31T20:11:51 | 100,315,955 | 47 | 40 | MIT | 2023-09-14T21:00:21 | 2017-08-14T22:58:33 | Python | UTF-8 | Python | false | false | 865 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._client import AutoRestHttpInfrastructureTestService
try:
from ._patch import __all__ as _patch_all
from ._patch import * # pylint: disable=unused-wildcard-import
except ImportError:
_patch_all = []
from ._patch import patch_sdk as _patch_sdk
__all__ = [
"AutoRestHttpInfrastructureTestService",
]
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
| [
"[email protected]"
]
| |
457b3c5c3ef7b6c87f2a2cb3e30e182a396b8713 | 7eadfc1711278719d4f02cb506f1d1df88cc66af | /model/supervised/cnn.py | 4110058df864f680190dd1ee7dbbb410a1114a2a | []
| no_license | fagan2888/Trading_by_Imitation_Learning | 33a88060e45e38d83b9b0972072cc1bcddcf0bdc | 0f6820609c64dd0a1e697ec2ac4566b60478025d | refs/heads/master | 2020-11-30T23:06:21.280800 | 2019-05-16T06:27:01 | 2019-05-16T06:27:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,477 | py | """
Train a supervised CNN model using optimal stock as label
"""
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D
from keras.models import load_model
from keras.optimizers import Adam
from ..base_model import BaseModel
from utils.data import normalize
import numpy as np
import tensorflow as tf
class StockCNN(BaseModel):
def __init__(self, nb_classes, window_length, weights_file='weights/cnn.h5'):
self.model = None
self.weights_file = weights_file
self.nb_classes = nb_classes
self.window_length = window_length
def build_model(self, load_weights=True):
""" Load training history from path
Args:
load_weights (Bool): True to resume training from file or just deploying.
Otherwise, training from scratch.
Returns:
"""
if load_weights:
self.model = load_model(self.weights_file)
print('Successfully loaded model')
else:
self.model = Sequential()
self.model.add(
Conv2D(filters=32, kernel_size=(1, 3), input_shape=(self.nb_classes, self.window_length, 1),
activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Conv2D(filters=32, kernel_size=(1, self.window_length - 2), activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Flatten())
self.model.add(Dense(64, activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(64, activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(self.nb_classes, activation='softmax'))
self.model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=1e-3),
metrics=['accuracy'])
print('Built model from scratch')
self.model._make_predict_function()
self.graph = tf.get_default_graph()
def train(self, X_train, Y_train, X_val, Y_val, verbose=True):
continue_train = True
while continue_train:
self.model.fit(X_train, Y_train, batch_size=128, epochs=10, validation_data=(X_val, Y_val),
shuffle=True, verbose=verbose)
save_weights = input('Type True to save weights\n')
            if save_weights:  # note: any non-empty reply (even "False") counts as True
self.model.save(self.weights_file)
continue_train = input("True to continue train, otherwise stop training...\n")
print('Finish.')
def evaluate(self, X_test, Y_test, verbose=False):
return self.model.evaluate(X_test, Y_test, verbose=verbose)
def predict(self, X_test, verbose=False):
return self.model.predict(X_test, verbose=verbose)
def predict_single(self, observation):
""" Predict the action of a single observation
Args:
observation: (num_stocks + 1, window_length)
Returns: a single action array with shape (num_stocks + 1,)
"""
obsX = observation[:, -self.window_length:, 3:4] / observation[:, -self.window_length:, 0:1]
obsX = normalize(obsX)
obsX = np.expand_dims(obsX, axis=0)
with self.graph.as_default():
return np.squeeze(self.model.predict(obsX), axis=0)
| [
"[email protected]"
]
| |
8e962c66a9d91dae37bddea35a9bff26c992c521 | 7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0 | /1101-1200/1110-Delete Nodes And Return Forest/1110-Delete Nodes And Return Forest.py | 3219a3ad56f0c557fe3ebbf9025c4afa1c801801 | [
"MIT"
]
| permissive | jiadaizhao/LeetCode | be31bd0db50cc6835d9c9eff8e0175747098afc6 | 4ddea0a532fe7c5d053ffbd6870174ec99fc2d60 | refs/heads/master | 2021-11-05T04:38:47.252590 | 2021-10-31T09:54:53 | 2021-10-31T09:54:53 | 99,655,604 | 52 | 28 | MIT | 2020-10-02T12:47:47 | 2017-08-08T05:57:26 | C++ | UTF-8 | Python | false | false | 803 | py | # Definition for a binary tree node.
from typing import List
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def delNodes(self, root: TreeNode, to_delete: List[int]) -> List[TreeNode]:
td = set(to_delete)
result = []
def dfs(root, prevDel):
if root is None:
return None
if root.val in td:
dfs(root.left, True)
dfs(root.right, True)
return None
else:
if prevDel == True:
result.append(root)
root.left = dfs(root.left, False)
root.right = dfs(root.right, False)
return root
dfs(root, True)
return result
| [
"[email protected]"
]
| |
4db824ca06fcb183b3cdd4afb8c1407541610ba5 | d8c1f119d1349dd8ad2e48619a8c258967cd9a31 | /Baekjun/Dijkstra/10282. 해킹.py | 57994f6df0e2f7d52580d8b02723c1259ebd1a75 | []
| no_license | Seonghyeony/DataStructure-Algorithm | c7c006ee705b68fc4d2d04dc6baaf0aeb80fc83e | 4121289cafd0050bda408934fcb14d88052c956f | refs/heads/master | 2023-04-12T16:30:17.039109 | 2021-05-08T10:31:05 | 2021-05-08T10:31:05 | 286,371,604 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,060 | py | import heapq
def dijkstra(graph, n, start):
distances = [float('inf') for _ in range(n + 1)]
distances[start] = 0
queue = []
heapq.heappush(queue, [0, start])
while queue:
current_distance, current_node = heapq.heappop(queue)
if distances[current_node] < current_distance:
continue
for adjacent, adjacent_distance in graph[current_node]:
distance = current_distance + adjacent_distance
if distance < distances[adjacent]:
distances[adjacent] = distance
heapq.heappush(queue, [distance, adjacent])
count = 0
ret = float('-inf')
for i in range(1, n + 1):
if distances[i] != float('inf'):
count += 1
ret = max(ret, distances[i])
print(count, ret)
test_case = int(input())
for _ in range(test_case):
n, d, c = map(int, input().split())
adj = [[] for _ in range(n + 1)]
for _ in range(d):
a, b, s = map(int, input().split())
adj[b].append([a, s])
dijkstra(adj, n, c)
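# Worked example (hypothetical stdin) for one test case: 3 computers,
# 2 dependencies, infection starting at computer 3:
#   1
#   3 2 3
#   2 3 4
#   1 2 8
# The stored edges are 3->2 (4s) and 2->1 (8s), so the program prints "3 12":
# all 3 computers get infected, the last one 12 seconds in.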
| [
"[email protected]"
]
| |
a467cc78728963d989a66e2ae338212b606e652f | 037d5d18b9b81205305e158d7d9fdad131d318cb | /tests/test_custom_version_base_class.py | cc5e981dbda96d06ba8914b794c8350a37b1e0a2 | []
| permissive | kvesteri/sqlalchemy-continuum | ee7acf2c961b27eab3dd8f61598d9159d801ee21 | a7a6bd7952185b1f82af985c0271834d886a617c | refs/heads/master | 2023-08-24T09:14:33.515416 | 2022-11-17T05:41:09 | 2023-07-24T23:37:12 | 10,312,759 | 479 | 134 | BSD-3-Clause | 2023-09-12T20:07:04 | 2013-05-27T10:30:55 | Python | UTF-8 | Python | false | false | 1,114 | py | import sqlalchemy as sa
from sqlalchemy_continuum import version_class
from tests import TestCase
class TestCommonBaseClass(TestCase):
def create_models(self):
class TextItem(self.Model):
__tablename__ = 'text_item'
__versioned__ = {}
id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
class ArticleVersionBase(self.Model):
__abstract__ = True
class Article(self.Model):
__tablename__ = 'article'
__versioned__ = {
'base_classes': (ArticleVersionBase, )
}
id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
self.TextItem = TextItem
self.Article = Article
self.ArticleVersionBase = ArticleVersionBase
def test_each_class_has_distinct_translation_class(self):
class_ = version_class(self.TextItem)
assert class_.__name__ == 'TextItemVersion'
class_ = version_class(self.Article)
assert class_.__name__ == 'ArticleVersion'
assert issubclass(class_, self.ArticleVersionBase)
| [
"[email protected]"
]
| |
5e14ac1175f45b85ceb256c7a8522a73237bc1f4 | 2aa84f9553a0593593afff720f7dfd8c6df3adde | /tests/test_query_parser.py | 3c2d26f25e088261c212386f85d9a8ce25602370 | []
| no_license | Pavel-Guseynov/sqlalchemy-searchable | c492f37e10e0e9054914af1f20cf799a58b9e8aa | 6baa13193f2f2a39ba96b231ee7f88843bdd6fd0 | refs/heads/master | 2021-07-25T14:02:07.876195 | 2017-11-06T16:17:05 | 2017-11-06T16:17:05 | 109,388,493 | 0 | 0 | null | 2017-11-03T11:35:01 | 2017-11-03T11:35:01 | null | UTF-8 | Python | false | false | 1,746 | py | # -*- coding: utf-8 -*-
from pyparsing import ParseException
from pytest import raises
from sqlalchemy_searchable.parser import SearchQueryParser
class TestSearchQueryParser(object):
def setup_method(self, method):
self.parser = SearchQueryParser()
def test_unicode(self):
assert self.parser.parse(u'안녕가は') == u'안녕가は:*'
def test_empty_string(self):
with raises(ParseException):
self.parser.parse('')
def test_or(self):
assert self.parser.parse('star or wars') == 'star:* | wars:*'
def test_multiple_ors(self):
assert self.parser.parse('star or or or wars') == 'star:* | wars:*'
def test_space_as_and(self):
assert self.parser.parse('star wars') == 'star:* & wars:*'
def test_multiple_spaces_as_and(self):
assert (
self.parser.parse('star wars luke') ==
'star:* & wars:* & luke:*'
)
def test_parenthesis(self):
assert self.parser.parse('(star wars) or luke') == (
'(star:* & wars:*) | luke:*'
)
def test_or_and(self):
assert (
self.parser.parse('star or wars luke or solo') ==
'star:* | wars:* & luke:* | solo:*'
)
def test_empty_parenthesis(self):
with raises(ParseException):
assert self.parser.parse('()')
def test_nested_parenthesis(self):
assert self.parser.parse('((star wars)) or luke') == (
'(star:* & wars:*) | luke:*'
)
def test_not(self):
assert self.parser.parse('-star') == (
'! star:*'
)
def test_not_with_parenthesis(self):
assert self.parser.parse('-(star wars)') == '! (star:* & wars:*)'
| [
"[email protected]"
]
| |
65cc4b40f81149e478236ca1e329f99ffc8fcb82 | e63c1e59b2d1bfb5c03d7bf9178cf3b8302ce551 | /uri/uri_python/ad_hoc/p2456.py | fd6b70e0c37682c826aa6690b0340d8b59f32ede | []
| no_license | GabrielEstevam/icpc_contest_training | b8d97184ace8a0e13e1c0bf442baa36c853a6837 | 012796c2ceb901cf7aa25d44a93614696a7d9c58 | refs/heads/master | 2020-04-24T06:15:16.826669 | 2019-10-08T23:13:15 | 2019-10-08T23:13:15 | 171,758,893 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | py | entry = input().split(" ")
a = int(entry[0])
b = int(entry[1])
c = int(entry[2])
d = int(entry[3])
e = int(entry[4])
if a < b and b < c and c < d and d < e:
print('C')
elif a > b and b > c and c > d and d > e:
print('D')
else:
print('N') | [
"[email protected]"
]
| |
2555d67c9356f76316ca075fb7052e0bc3678ccc | 0c43ae8365998144ebc23156c12768711114e6f9 | /web_flask/4-number_route.py | 1aa7ec504db5b6beaf7e14f834086abdbf92165a | []
| no_license | Nesgomez9/AirBnB_clone_v2 | 74e343ade1c418b49c8ebaee79f6319f8e971ff6 | 055c4e92c819fd0e9dec369e687c1601f243f02c | refs/heads/master | 2021-05-19T12:50:35.656686 | 2020-04-23T03:33:41 | 2020-04-23T03:33:41 | 251,707,487 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | #!/usr/bin/python3
from flask import Flask
app = Flask(__name__)
@app.route("/", strict_slashes=False)
def hello_route():
return "Hello HBNB!"
@app.route("/hbnb", strict_slashes=False)
def hbnb_route():
return "HBNB"
@app.route("/c/<text>", strict_slashes=False)
def c_route(text):
text = text.replace("_", " ")
return "C {}".format(text)
@app.route("/python/<text>", strict_slashes=False)
@app.route("/python", strict_slashes=False)
def python_route(text="is cool"):
text = text.replace("_", " ")
return "Python {}".format(text)
@app.route("/number/<int:n>", strict_slashes=False)
def number_route(n):
return "{:d} is a number".format(n)
if __name__ == "__main__":
app.run(host="0.0.0.0", port="5000")
| [
"[email protected]"
]
| |
e1fee3842a2ba41bf122a82af2236ea8f8fad717 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/v_sniper/sniper-master/cron.py | 39485faf71647c6676d5605dd32b483c4befdc2b | []
| no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 3,777 | py | #!/usr/bin/env python
""" This represents the cronjob that runs to check for course openings"""
from flaskext.mail import Message
import urllib
from models import db, Snipe
from soc import Soc
from app import mail, app
import datetime
from collections import namedtuple
soc = Soc()
EMAIL_SENDER = "Course Sniper <[email protected]>"
Section = namedtuple('Section', ['number', 'index'])
def poll(subject, result=False):
""" Poll a subject for open courses. """
app.logger.warning("Polling for %s" % (subject))
# get all the course data from SOC
courses = soc.get_courses(subject)
# build information about which courses/sections are currently open.
open_data = {}
if courses is not None:
for course in courses:
course_number = course['courseNumber']
# remove leading zeroes
if course_number.isdigit():
course_number = str(int(course_number))
open_data[course_number] = []
for section in course['sections']:
section_number = section['number']
if section_number.isdigit():
section_number = str(int(section_number))
# section is open
if section['openStatus']:
open_data[course_number].append(Section(section_number, section['index']))
# all of these course numbers are open
open_courses = [course for course, open_sections in open_data.iteritems() if open_sections]
if result:
return open_data
if open_courses:
# Notify people that were looking for these courses
snipes = Snipe.query.filter(Snipe.course_number.in_(open_courses), Snipe.subject==str(subject))
for snipe in snipes:
for section in open_data[snipe.course_number]:
if section.number == snipe.section:
notify(snipe, section.index)
else:
app.logger.warning('Subject "%s" has no open courses' % (subject))
else:
app.logger.warning('Subject "%s" is not valid' % (subject))
def notify(snipe, index):
""" Notify this snipe that their course is open"""
course = '%s:%s:%s' % (snipe.subject, snipe.course_number, snipe.section)
if snipe.user.email:
attributes = {
'email': snipe.user.email,
'subject': snipe.subject,
'course_number': snipe.course_number,
'section': snipe.section,
}
# build the url for prepopulated form
url = 'http://sniper.rutgers.io/?%s' % (urllib.urlencode(attributes))
register_url = 'https://sims.rutgers.edu/webreg/editSchedule.htm?login=cas&semesterSelection=12017&indexList=%s' % (index)
email_text = 'A course (%s) that you were watching looks open. Its index number is %s. Click the link below to register for it!\n\n %s \n\n If you don\'t get in, visit this URL: \n\n %s \n\n to continue watching it.\n\n Send any feedback to [email protected]' % (course, index, register_url, url)
# send out the email
message = Message('[Course Sniper](%s) is open' %(course), sender=EMAIL_SENDER)
message.body = email_text
        message.add_recipient(snipe.user.email)
mail.send(message)
db.session.delete(snipe)
db.session.commit()
app.logger.warning('Notified user: %s about snipe %s' % (snipe.user, snipe))
if __name__ == '__main__':
# get all the courses that should be queried.
app.logger.warning("----------- Running the Cron %s " % (str(datetime.datetime.now())))
subjects = db.session.query(Snipe.subject).distinct().all()
for subject in subjects:
poll(subject[0])
| [
"[email protected]"
]
| |
3119ea6af239d63712279b5d9972ab77083b0507 | d906b38849fcb8eb26dc584dfb03d9ed5a133abb | /pendulum.py | cdc8f0dcaa2b2591c1eb4e512b073da368453a54 | []
| no_license | basicmachines/sim-dynamics | dd5213f10b7a8bbc325e492b41714ceee45e0c1c | 3430651a5b684ecca4a0ceb282213070f379c2fd | refs/heads/master | 2020-03-08T14:11:40.689788 | 2018-04-10T05:53:21 | 2018-04-10T05:53:21 | 128,178,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,355 | py | #!/usr/bin/env python
"""Dynamic simulation of a pendulum.
"""
from pygame.locals import K_z, K_x, K_c, K_b, K_n, K_m
from simulator import Simulator
# Import the model you want to simulate
from models import Pendulum
# Import the controller(s) you want to simulate
from controllers import PIDController, KeyboardInput
TARGET_FPS = 30
# Initialize model
model = Pendulum(position=(16, 12))
# ----------- Setup Keyboard Controller ---------------
# Map the keys to the model inputs
key_actions = {
K_m: 'TP3', # Positive torque values (counter-clockwise)
K_n: 'TP2',
K_b: 'TP1',
K_z: 'TN3', # Negative torque values (clockwise)
K_x: 'TN2',
K_c: 'TN1'
}
kbd_controller = KeyboardInput(model.inputs, key_actions=key_actions)
key_instructions = [
'z, x, c - apply anti-clockwise torque',
'b, n, m - apply clockwise torque'
]
# ----------- Setup PID Controller ---------------
pid_controller = PIDController(
cv=model.outputs['a'],
mv=model.inputs,
kp=75.0,
ki=8.0,
kd=300.0,
set_point=0.0,
mv_max=7,
mv_min=-7,
bool_outputs=model.torque_settings,
time_step=1.0 / TARGET_FPS
)
# ------------- Run Simulation -----------------
simulator = Simulator(
model=model,
controllers=[kbd_controller, pid_controller],
key_instructions=key_instructions
)
simulator.run()
| [
"[email protected]"
]
| |
16eb4f0e51d45f39b17d70bcf2a407765c928ad8 | 5bd4893a793ed739127f15becd9558cacf461540 | /scripts/hit_endpoint.py | b86fbea7d7419186ef9c482e8f1b00b6d7f17c8a | []
| no_license | hauensteina/ahn-repo | d3aa665eeef846e426b866d587e8649c8283e74c | 93bd7c54548a083f39510fc562c9e7540c4f672a | refs/heads/master | 2023-07-24T05:34:51.289699 | 2023-07-13T16:10:25 | 2023-07-13T16:10:25 | 99,860,476 | 0 | 1 | null | 2023-07-15T01:33:35 | 2017-08-09T23:20:28 | Python | UTF-8 | Python | false | false | 739 | py | #!/usr/bin/env python
# Python 3 script hitting a REST endpoint
# AHN, Jun 2019
import sys
import requests
from pdb import set_trace as BP
URL = 'https://ahaux.com/leela_server/select-move/leela_gtp_bot?tt=1234'
ARGS = {'board_size':19,'moves':[],'config':{'randomness':0.5,'request_id':'0.6834311059880898'}}
#-------------
def main():
res = hit_endpoint( URL, ARGS)
print( res)
# Hit an endpoint with a POST request
#----------------------------------------
def hit_endpoint( url, args):
try:
resp = requests.post( url, json=args)
res = resp.json()
return res
except Exception as e:
print( 'ERROR: hit_endpoint() failed: %s' % str(e))
sys.exit(1)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
552ac5116d0dbc29272076004d4a9b916cb2a96e | a9fc496e0724866093dbb9cba70a8fdce12b67a9 | /scripts/field/q59000_tuto.py | 5925ba6e70b43edd7737755c40baf93619dad4ae | [
"MIT"
]
| permissive | ryantpayton/Swordie | b2cd6b605f7f08f725f5e35d23ba3c22ef2ae7c0 | ca6f42dd43f63b1d2e6bb5cdc8fc051c277f326e | refs/heads/master | 2022-12-01T09:46:47.138072 | 2020-03-24T10:32:20 | 2020-03-24T10:32:20 | 253,997,319 | 2 | 0 | MIT | 2022-11-24T08:17:54 | 2020-04-08T05:50:22 | Java | UTF-8 | Python | false | false | 1,222 | py | # Arboren : Stump Town
if not sm.hasQuest(59000): # The Town Prankster
if not sm.hasQuestCompleted(59000): # The Town Prankster
sm.removeEscapeButton()
if sm.sendAskYesNo("Would you like to skip the tutorial cutscenes?"):
#todo add after skipping tutorial
sm.dispose()
else:
sm.setPlayerAsSpeaker()
sm.sendNext("Dun, dun, dun. Hero theme song! I'm #h #, I'm from a town hidden deeep within Arboren Forest!")
sm.sendNext("I've got the coolest ears and tail, dun dun dun. They're super heroic, dun dun dun.")
sm.sendNext("And I'm gonna be a hero somedaaaaay. A hero somedaaaaay! Drumroll!")
sm.sendNext("For reals. Granny Rosanna tells me bedtime stories every night...")
sm.sendNext("Stories about the #bfive brave heroes#k, who sealed away the terrifying #bBlack Mage#k! \r\n Pew, pew, kaboom! I'm gonna be a hero just like 'em someday soon!")
sm.setSpeakerID(9390305)
#todo effects
sm.sendNext("Who'd dig a hole here!?")
sm.setPlayerAsSpeaker()
sm.sendNext("Uh oh, what's this? I smell... the need for a Hero!")
sm.dispose() | [
"[email protected]"
]
| |
ec7fa1f86c2a000110ed3e35ad2f81201ff443b7 | cb062c48280311134fe22573a41f9c4d6631b795 | /src/xm/core/txs/multisig/MultiSigVote.py | a05b6202d10dcb74e58f0c5ec2605ebb1c0396e9 | [
"MIT"
]
| permissive | xm-blockchain/xm-core | da1e6bb4ceb8ab642e5d507796e2cc630ed23e0f | 2282b435a02f061424d656155756d8f50238bcfd | refs/heads/main | 2023-01-15T19:08:31.399219 | 2020-11-19T03:54:19 | 2020-11-19T03:54:19 | 314,127,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,528 | py | from pyxmlib.pyxmlib import bin2hstr
from xm.core.State import State
from xm.core.StateContainer import StateContainer
from xm.core.misc import logger
from xm.core.txs.Transaction import Transaction
from xm.crypto.misc import sha256
class MultiSigVote(Transaction):
"""
    MultiSigVote casts or withdraws a signatory's vote on a pending
    MultiSigSpend transaction, identified by its shared key.
"""
def __init__(self, protobuf_transaction=None):
super(MultiSigVote, self).__init__(protobuf_transaction)
@property
def shared_key(self):
return self._data.multi_sig_vote.shared_key
@property
def unvote(self):
return self._data.multi_sig_vote.unvote
@property
def prev_tx_hash(self):
return self._data.multi_sig_vote.prev_tx_hash
def set_prev_tx_hash(self, prev_tx_hash: bytes):
self._data.multi_sig_vote.prev_tx_hash = prev_tx_hash
def get_data_hash(self):
tmp_tx_hash = (self.master_addr +
self.fee.to_bytes(8, byteorder='big', signed=False) +
self.shared_key +
self.unvote.to_bytes(1, byteorder='big', signed=False))
return sha256(tmp_tx_hash)
@staticmethod
def create(shared_key: bytes,
unvote: bool,
fee: int,
xmss_pk,
master_addr: bytes = None):
multi_sig_vote = MultiSigVote()
if master_addr:
multi_sig_vote._data.master_addr = master_addr
multi_sig_vote._data.public_key = bytes(xmss_pk)
multi_sig_vote._data.multi_sig_vote.shared_key = shared_key
multi_sig_vote._data.multi_sig_vote.unvote = unvote
multi_sig_vote._data.fee = int(fee)
multi_sig_vote.validate_or_raise(verify_signature=False)
return multi_sig_vote
def _validate_custom(self):
if self.fee < 0:
logger.warning('MultiSigVote [%s] Invalid Fee = %d', bin2hstr(self.txhash), self.fee)
return False
return True
def _validate_extended(self, state_container: StateContainer):
if state_container.block_number < state_container.current_dev_config.hard_fork_heights[0]:
logger.warning("[MultiSigVote] Hard Fork Feature not yet activated")
return False
addr_from_state = state_container.addresses_state[self.addr_from]
vote_stats = state_container.votes_stats[self.shared_key]
if vote_stats is None:
logger.warning("[MultiSigVote] Invalid Shared key %s", bin2hstr(self.shared_key))
return False
multi_sig_spend_tx = state_container.multi_sig_spend_txs[self.shared_key]
block_number = state_container.block_number
if vote_stats.executed:
logger.warning("[MultiSigVote] Invalid Tx as MultiSigSpend has already been executed")
return False
if multi_sig_spend_tx is None:
logger.warning("MultiSigSpend not found, Shared Key %s", bin2hstr(self.shared_key))
return False
if block_number > multi_sig_spend_tx.expiry_block_number:
logger.warning("[MultiSigVote] Voted for expired Multi Sig Spend Txn")
logger.warning("Expiry Block Number: %s, Current Block Number: %s",
multi_sig_spend_tx.expiry_block_number,
block_number)
return False
if self.addr_from not in vote_stats.signatories:
logger.warning("Address not found in signatory list")
logger.warning("Address %s, Shared Key %s, Multi Sig Address %s",
bin2hstr(self.addr_from),
bin2hstr(self.shared_key),
bin2hstr(vote_stats.multi_sig_address))
return False
index = vote_stats.get_address_index(self.addr_from)
if vote_stats.unvotes[index] == self.unvote:
logger.warning("[MultiSigVote] Invalid as Vote type already executed")
logger.warning("Vote type %s", self.unvote)
return False
tx_balance = addr_from_state.balance
if tx_balance < self.fee:
logger.warning('[MultiSigVote] State validation failed for %s because: Insufficient funds',
bin2hstr(self.txhash))
logger.warning('balance: %s, fee: %s', tx_balance, self.fee)
return False
return True
def set_affected_address(self, addresses_set: set):
super().set_affected_address(addresses_set)
def apply(self,
state: State,
state_container: StateContainer) -> bool:
address_state = state_container.addresses_state[self.addr_from]
address_state.update_balance(state_container, self.fee, subtract=True)
state_container.paginated_tx_hash.insert(address_state, self.txhash)
vote_stats = state_container.votes_stats[self.shared_key]
multi_sig_address = vote_stats.multi_sig_address
weight, found = state_container.addresses_state[multi_sig_address].get_weight_by_signatory(self.addr_from)
if not found:
logger.info("[MultiSigVote] Address is not the signatory for the multi sig address")
return False
self.set_prev_tx_hash(vote_stats.get_vote_tx_hash_by_signatory_address(self.addr_from))
if not vote_stats.apply_vote_stats(self, weight, state_container):
logger.info("[MultiSigVote] Failed to apply vote_stats")
return False
return self._apply_state_changes_for_PK(state_container)
def revert(self,
state: State,
state_container: StateContainer) -> bool:
vote_stats = state_container.votes_stats[self.shared_key]
multi_sig_address = vote_stats.multi_sig_address
weight, found = state_container.addresses_state[multi_sig_address].get_weight_by_signatory(self.addr_from)
if not found:
logger.info("[MultiSigVote] Address is not the signatory for the multi sig address")
return False
if not vote_stats.revert_vote_stats(self, weight, state_container):
logger.info("[MultiSigVote] Failed to revert vote_stats")
return False
address_state = state_container.addresses_state[self.addr_from]
address_state.update_balance(state_container, self.fee)
state_container.paginated_tx_hash.remove(address_state, self.txhash)
return self._revert_state_changes_for_PK(state_container)
| [
"[email protected]"
]
| |
35bf4c7a9b21ab23ef52b6c6f0f8175b0648633a | 61afd923551491846ae827821f55c4fb5fd04c98 | /packages/levylab_lib_levylab_instruments/levylab_lib_levylab_instruments-1.4.2.53.spec | 04f92bcc228f4e1c73706452894072f1ab9ff991 | [
"BSD-3-Clause"
]
| permissive | laserengineer/levylabpitt.github.io | b74b711aff2a5eb1b46f880a1071ac0873f1a9ac | cdf9aeb6faaf136211291ce2232c239229d85bbe | refs/heads/master | 2023-04-29T02:36:48.736236 | 2021-05-14T19:20:40 | 2021-05-14T19:20:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,436 | spec | [Package]
Name="levylab_lib_levylab_instruments"
Version="1.4.2.53"
Release=""
ID=26edd37d85e86c8ffd7cf08a57fa4313
File Format="vip"
Format Version="2017"
Display Name="Instruments"
[Description]
Description="Abstract instrument.lvclass"
Summary=""
License="BSD-3"
Copyright="Copyright (c) 2020, Levylab"
Distribution=""
Vendor="Levylab"
URL=""
Packager="Patrick Irvin"
Demo="FALSE"
Release Notes="[1.4.2.53]\0A- Add "Finished?" boolean to Get Magnet/Temperature/Delay\0A- Change the way the Instrument SMO is configured (dev must override SMO Name/Port/Public API/RC Tyle - constant.vi)"
System Package="FALSE"
Sub Package="FALSE"
License Agreement="TRUE"
[LabVIEW]
close labview before install="FALSE"
restart labview after install="FALSE"
skip mass compile after install="FALSE"
[Platform]
Exclusive_LabVIEW_Version="LabVIEW>=16.0"
Exclusive_LabVIEW_System="ALL"
Exclusive_OS="ALL"
[Script VIs]
PreInstall=""
PostInstall=""
PreUninstall=""
PostUninstall=""
Verify=""
PreBuild=""
PostBuild=""
[Dependencies]
AutoReqProv=FALSE
Requires="jki_lib_state_machine>=2018.0.7.45,jki_statemachineobjects>=1.3.0.56,mgi_lib_application_control>=1.1.1.10,mgi_lib_error_handling>=1.1.1.3,mgi_lib_error_reporter>=1.0.2.5,national_instruments_lib_guid_generator>=1.0.2.3,ni_lib_stm>=3.1.0.9,oglib_appcontrol>=4.1.0.7,oglib_error>=4.2.0.23,oglib_file>=4.0.1.22,oglib_lvdata>=4.2.0.21,oglib_numeric>=4.1.0.8,oglib_string>=4.1.0.12,oglib_time>=4.0.1.3,oglib_variantconfig>=4.0.0.5"
Conflicts=""
[Activation]
License File=""
Licensed Library=""
[Files]
Num File Groups="3"
Sub-Packages=""
Namespaces=""
[File Group 0]
Target Dir="<application>"
Replace Mode="Always"
Num Files=202
File 0="user.lib/Levylab/Levylab Instruments/instrument.lvproj"
File 1="user.lib/Levylab/Levylab Instruments/LICENSE"
File 2="user.lib/Levylab/Levylab Instruments/README.md"
File 3="user.lib/Levylab/Levylab Instruments/SMOs/SCPI/SCPI Decode.vi"
File 4="user.lib/Levylab/Levylab Instruments/SMOs/SCPI/SCPI Encode.vi"
File 5="user.lib/Levylab/Levylab Instruments/SMOs/SCPI/SCPI.lvclass"
File 6="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Close Connection.vi"
File 7="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Connection Monitor - Loop.vi"
File 8="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Connection Monitor - Stop.vi"
File 9="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/onCreate.vi"
File 10="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Open Client Connection.vi"
File 11="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Open Server Connection.vi"
File 12="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Read listener ID.vi"
File 13="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Read Message.vi"
File 14="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Read STM connection info.vi"
File 15="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/RemoteControl.STM.lvclass"
File 16="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/RemoteControl.STM.TestLauncher.vi"
File 17="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Send Message.vi"
File 18="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Client.vi"
File 19="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Client_OO.vi"
File 20="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Client_SM.vi"
File 21="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Server.vi"
File 22="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/STM_Server_part_OO.vi"
File 23="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Write listener ID.vi"
File 24="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl.STM/Write STM connection info.vi"
File 25="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Client - JKI SM or SMO.vi"
File 26="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Close Connection.vi"
File 27="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Connection Monitor - Loop.vi"
File 28="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Connection Monitor - Stop.vi"
File 29="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Create RC Client.vi"
File 30="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Create RC Server.vi"
File 31="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/CreatePrivateEvents.vi"
File 32="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/CreatePublicEvents.vi"
File 33="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/DestroyPrivateEvents.vi"
File 34="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/DestroyPublicEvents.vi"
File 35="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Error Dialog.vi"
File 36="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/onCreate.vi"
File 37="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Open Client Connection.vi"
File 38="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Open Server Connection.vi"
File 39="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Process.vi"
File 40="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Process_Backup.vi"
File 41="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read address.vi"
File 42="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read Commands.vi"
File 43="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read Message.vi"
File 44="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Read Port.vi"
File 45="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Remote Client.vi"
File 46="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.Configure.vi"
File 47="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.GetPrivateEvents.vi"
File 48="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.GetPublicEvents.vi"
File 49="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.lvclass"
File 50="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.SendMessageFromProcess.vi"
File 51="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.SendMessageToProcess.vi"
File 52="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.TestLauncher.ConnectionMonitor.vi"
File 53="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControl.TestLauncher.Server.vi"
File 54="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/RemoteControlGlobal.vi"
File 55="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Send and Receive Message.vi"
File 56="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Send Message.vi"
File 57="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Time Message Events.vi"
File 58="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/VirtualTestInstrument.vi"
File 59="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Write address.vi"
File 60="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Write Commands.vi"
File 61="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Write Port.vi"
File 62="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/Message--Cluster.ctl"
File 63="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--Cluster.ctl"
File 64="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--RemoteControl.Configure.ctl"
File 65="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--RemoteControl.Reply Message.ctl"
File 66="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PrivateEvents--RemoteControl.SendMessageToProcess.ctl"
File 67="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PublicEvents--Cluster.ctl"
File 68="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PublicEvents--RemoteControl.Reply Remote Message.ctl"
File 69="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/PublicEvents--RemoteControl.SendMessageFromProcess.ctl"
File 70="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/RC Process Type--Enum.ctl"
File 71="user.lib/Levylab/Levylab Instruments/SMOs/RemoteControl/Typedefs/Variant with Message ID--cluster.ctl"
File 72="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/CreatePrivateEvents.vi"
File 73="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/DestroyPrivateEvents.vi"
File 74="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Error Log Generator Example 1.vi"
File 75="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Log Error.vi"
File 76="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.GetPrivateEvents.vi"
File 77="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.LogError.vi"
File 78="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.lvclass"
File 79="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Logger.Error.TestLauncher.vi"
File 80="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Process.vi"
File 81="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Typedefs/PrivateEvents--Cluster.ctl"
File 82="user.lib/Levylab/Levylab Instruments/SMOs/Logger.Error/Typedefs/PrivateEvents--Logger.Error.Log Error.ctl"
File 83="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/CreatePrivateEvents.vi"
File 84="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/CreatePublicEvents.vi"
File 85="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/DestroyPrivateEvents.vi"
File 86="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/DestroyPublicEvents.vi"
File 87="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.GetPublicEvents.vi"
File 88="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.lvclass"
File 89="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.TestLauncher.vi"
File 90="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.DSC.WriteVariable.vi"
File 91="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.GetPrivateEvents.vi"
File 92="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Logger.ReadVariable.vi"
File 93="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Process.vi"
File 94="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Read from DSC (unused).vi"
File 95="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Write Cluster to DSC.vi"
File 96="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Write to DSC.vi"
File 97="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PrivateEvents--Cluster.ctl"
File 98="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PrivateEvents--Logger.DSC.Set Address.ctl"
File 99="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PrivateEvents--Logger.DSC.Write Variable.ctl"
File 100="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PublicEvents--Cluster.ctl"
File 101="user.lib/Levylab/Levylab Instruments/SMOs/Logger.DSC/Typedefs/PublicEvents--Logger.DSC.Read Variable.ctl"
File 102="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Logger.lvclass"
File 103="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Logger.TestLauncher.vi"
File 104="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Process.vi"
File 105="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Read path.vi"
File 106="user.lib/Levylab/Levylab Instruments/SMOs/Logger/Write path.vi"
File 107="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/Handle Error.vi"
File 108="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/LevyLab.lvclass"
File 109="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/LevyLab.TestLauncher.vi"
File 110="user.lib/Levylab/Levylab Instruments/SMOs/LevyLab/Process.vi"
File 111="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/API Commands--constant.vi"
File 112="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Close Instrument.vi"
File 113="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Configure Instrument.vi"
File 114="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Create Instrument.vi"
File 115="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/CreatePrivateEvents.vi"
File 116="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/CreatePublicEvents.vi"
File 117="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/DestroyPrivateEvents.vi"
File 118="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/DestroyPublicEvents.vi"
File 119="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/enumerateStaticDependencies.vi"
File 120="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Error Dialog.vi"
File 121="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Get Instrument Dependencies.vi"
File 122="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/getAll.vi"
File 123="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Handle Command.vi"
File 124="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Handle Error.vi"
File 125="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Configuration Window.vi"
File 126="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.GetPrivateEvents.vi"
File 127="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.GetPublicEvents.vi"
File 128="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.lvclass"
File 129="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.MessageFromProcess.vi"
File 130="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.MessageToProcess.vi"
File 131="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Read Configuration File.vi"
File 132="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Read Configuration.vi"
File 133="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.TestLauncher.vi"
File 134="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Write Configuration File.vi"
File 135="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Instrument.Write Configuration.vi"
File 136="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/List Devices.vi"
File 137="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Open Instrument.vi"
File 138="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Process.vi"
File 139="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Read Configuration Class.vi"
File 140="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Read Hardware Address.vi"
File 141="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Remote Client.vi"
File 142="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO Name - constant.vi"
File 143="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO Port - constant.vi"
File 144="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO Public API - constant.vi"
File 145="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/SMO RC Type - constant.vi"
File 146="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Configuration Class.vi"
File 147="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Configuration Path.vi"
File 148="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Hardware Address.vi"
File 149="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write Process Name.vi"
File 150="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Write SMO Configuration.vi"
File 151="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/Configuration--Cluster.ctl"
File 152="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/DSC Configuration--Cluster.ctl"
File 153="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/HW Configuration--Cluster.ctl"
File 154="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PrivateEvents--Cluster.ctl"
File 155="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PrivateEvents--Instrument.MessageToProcess.ctl"
File 156="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PublicEvents--Cluster.ctl"
File 157="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PublicEvents--Instrument.get all.ctl"
File 158="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/PublicEvents--Instrument.MessageFromProcess.ctl"
File 159="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/SMO Configuration--Cluster.ctl"
File 160="user.lib/Levylab/Levylab Instruments/SMOs/Instrument/Typedefs/SMO RC Type--enum.ctl"
File 161="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Configuration.lvclass"
File 162="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Read Configuration.vi"
File 163="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Write Configuration.vi"
File 164="user.lib/Levylab/Levylab Instruments/SMOs/Configuration/Write Path.vi"
File 165="user.lib/Levylab/Levylab Instruments/Instrument Types/VSource/Get Bias Voltage.vi"
File 166="user.lib/Levylab/Levylab Instruments/Instrument Types/VSource/Instrument.VSource.lvclass"
File 167="user.lib/Levylab/Levylab Instruments/Instrument Types/VSource/Set Bias Voltage.vi"
File 168="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Get Data.vi"
File 169="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Instrument.VNA.lvclass"
File 170="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Average.vi"
File 171="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Format.vi"
File 172="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Measurement.vi"
File 173="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Power.vi"
File 174="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Set Sweep.vi"
File 175="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Conversion--enum.ctl"
File 176="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Format--enum.ctl"
File 177="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Measurement--enum.ctl"
File 178="user.lib/Levylab/Levylab Instruments/Instrument Types/VNA/Typedefs/Sweep Type--enum.ctl"
File 179="user.lib/Levylab/Levylab Instruments/Instrument Types/Strain/Get Strain.vi"
File 180="user.lib/Levylab/Levylab Instruments/Instrument Types/Strain/Instrument.Strain.lvclass"
File 181="user.lib/Levylab/Levylab Instruments/Instrument Types/Strain/Set Strain.vi"
File 182="user.lib/Levylab/Levylab Instruments/Instrument Types/Optical Delay Line/Get Delay.vi"
File 183="user.lib/Levylab/Levylab Instruments/Instrument Types/Optical Delay Line/Instrument.OpticalDelayLine.lvclass"
File 184="user.lib/Levylab/Levylab Instruments/Instrument Types/Optical Delay Line/Set Delay.vi"
File 185="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Angle.vi"
File 186="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Helium Level.vi"
File 187="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Magnet Field.vi"
File 188="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Nitrogen Level.vi"
File 189="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Pressure.vi"
File 190="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Get Temperature.vi"
File 191="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Instrument.Cryostat.lvclass"
File 192="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Set Angle.vi"
File 193="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Set Magnet Field.vi"
File 194="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Set Temperature.vi"
File 195="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Wait for Magnet Setpoint.vi"
File 196="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Wait for Temperature Setpoint.vi"
File 197="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Typedefs/Magnet Axis--Enum.ctl"
File 198="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Typedefs/Magnet Mode--Enum.ctl"
File 199="user.lib/Levylab/Levylab Instruments/Instrument Types/Cryostat/Typedefs/Rotator Axis--Enum.ctl"
File 200="user.lib/Levylab/Levylab Instruments/Instrument Types/CBridge/Get Capacitance.vi"
File 201="user.lib/Levylab/Levylab Instruments/Instrument Types/CBridge/Instrument.CBridge.lvclass"
[File Group 1]
Target Dir="<menus>/Categories/Levylab"
Replace Mode="Always"
Num Files=18
File 0="_functions_levylab_lib_levylab_instruments_1.mnu"
File 1="_functions_levylab_lib_levylab_instruments_10.mnu"
File 2="_functions_levylab_lib_levylab_instruments_11.mnu"
File 3="_functions_levylab_lib_levylab_instruments_12.mnu"
File 4="_functions_levylab_lib_levylab_instruments_13.mnu"
File 5="_functions_levylab_lib_levylab_instruments_14.mnu"
File 6="_functions_levylab_lib_levylab_instruments_15.mnu"
File 7="_functions_levylab_lib_levylab_instruments_16.mnu"
File 8="_functions_levylab_lib_levylab_instruments_17.mnu"
File 9="_functions_levylab_lib_levylab_instruments_2.mnu"
File 10="_functions_levylab_lib_levylab_instruments_3.mnu"
File 11="_functions_levylab_lib_levylab_instruments_4.mnu"
File 12="_functions_levylab_lib_levylab_instruments_5.mnu"
File 13="_functions_levylab_lib_levylab_instruments_6.mnu"
File 14="_functions_levylab_lib_levylab_instruments_7.mnu"
File 15="_functions_levylab_lib_levylab_instruments_8.mnu"
File 16="_functions_levylab_lib_levylab_instruments_9.mnu"
File 17="functions_Levylab_lib_Levylab_Instruments.mnu"
[File Group 2]
Target Dir="<menus>/Categories/Levylab"
Replace Mode="If Newer"
Num Files=1
File 0="dir.mnu"
| [
"[email protected]"
]
| |
75528b21f1eac2877ca966946d1370e81593004b | 05c6b9f1f769ff359b757a913e0d43aeb1dfb9c6 | /hcf.py | f5cd7c9c45512a220634fefb6a6049cfbdbaad6c | []
| no_license | samarthchadda/pypi | bc92c0f7086ead65cb7242f7ea827470817a3b55 | 0c3308be995c8952c8db6b56aae46e58722d4d82 | refs/heads/master | 2020-04-01T16:48:13.337978 | 2018-11-04T10:17:59 | 2018-11-04T10:17:59 | 153,399,057 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | py | def computeHCF(x,y):
if x>y:
smaller=y
else:
smaller=x
for i in range(1,smaller+1):
if((x%i==0) and (y%i==0)):
hcf=i
return hcf
num1=int(input("Enter first number:"))
num2=int(input("Enter second nunber:"))
print("H.C.F of",num1,"and",num2,"is",computeHCF(num1,num2))
| [
"[email protected]"
]
| |
74642e8877743f8591bc0e8ec061ab3c92d67f5a | 6a803f0be359651a68107ccc2452be58e178d54b | /test/test_tojson.py | ba97f228529b58dca993c52a07690caadab47f87 | [
"MIT"
]
| permissive | pombredanne/javaproperties-cli | 14c8a067ec8a4af6bb8ac25e64117fafb7e0238e | 192f96a9ffa504ed3c0fd9636f7a321b65f8cad4 | refs/heads/master | 2020-12-25T18:42:39.656917 | 2017-05-17T13:10:48 | 2017-05-17T13:10:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,347 | py | from click.testing import CliRunner
from javaproperties_cli.tojson import properties2json
def test_properties2json_empty():
r = CliRunner().invoke(properties2json, input=b'')
assert r.exit_code == 0
assert r.output_bytes == b'{}\n'
def test_properties2json_comment_only():
r = CliRunner().invoke(properties2json, input=b'#This is a comment.\n')
assert r.exit_code == 0
assert r.output_bytes == b'{}\n'
def test_properties2json_simple():
r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
key = value
foo: bar
zebra apple
''')
assert r.exit_code == 0
assert r.output_bytes == b'''{
"foo": "bar",
"key": "value",
"zebra": "apple"
}
'''
def test_properties2json_scalarlike():
r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
key = 42
foo: 3.14
zebra null
true=false
''')
assert r.exit_code == 0
assert r.output_bytes == b'''{
"foo": "3.14",
"key": "42",
"true": "false",
"zebra": "null"
}
'''
def test_properties2json_empty_value():
r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
empty=
missing
''')
assert r.exit_code == 0
assert r.output_bytes == b'''{
"empty": "",
"missing": ""
}
'''
def test_properties2json_escaped_nonascii_input():
r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
edh: \\u00F0
snowman: \\u2603
goat: \\uD83D\\uDC10
\\u00F0: edh
\\uD83D\\uDC10: goat
\\u2603: snowman
''')
assert r.exit_code == 0
assert r.output_bytes == b'''{
"edh": "\\u00f0",
"goat": "\\ud83d\\udc10",
"snowman": "\\u2603",
"\\u00f0": "edh",
"\\u2603": "snowman",
"\\ud83d\\udc10": "goat"
}
'''
def test_properties2json_utf8_input_no_encoding():
r = CliRunner().invoke(properties2json, input=b'''
#Mon Nov 07 15:29:40 EST 2016
edh: \xC3\xB0
snowman: \xE2\x98\x83
goat: \xF0\x9F\x90\x90
\xC3\xB0: edh
\xF0\x9F\x90\x90: goat
\xE2\x98\x83: snowman
''')
assert r.exit_code == 0
assert r.output_bytes == b'''{
"edh": "\\u00c3\\u00b0",
"goat": "\\u00f0\\u009f\\u0090\\u0090",
"snowman": "\\u00e2\\u0098\\u0083",
"\\u00c3\\u00b0": "edh",
"\\u00e2\\u0098\\u0083": "snowman",
"\\u00f0\\u009f\\u0090\\u0090": "goat"
}
'''
def test_properties2json_utf8_input():
r = CliRunner().invoke(properties2json, ['--encoding', 'utf-8'], input=b'''
#Mon Nov 07 15:29:40 EST 2016
edh: \xC3\xB0
snowman: \xE2\x98\x83
goat: \xF0\x9F\x90\x90
\xC3\xB0: edh
\xF0\x9F\x90\x90: goat
\xE2\x98\x83: snowman
''')
assert r.exit_code == 0
assert r.output_bytes == b'''{
"edh": "\\u00f0",
"goat": "\\ud83d\\udc10",
"snowman": "\\u2603",
"\\u00f0": "edh",
"\\u2603": "snowman",
"\\ud83d\\udc10": "goat"
}
'''
def test_properties2json_utf16_input():
r = CliRunner().invoke(properties2json, ['--encoding', 'utf-16BE'], input=u'''
#Mon Nov 07 15:29:40 EST 2016
edh: \u00F0
snowman: \u2603
goat: \U0001F410
\u00F0: edh
\U0001F410: goat
\u2603: snowman
'''.encode('UTF-16BE'))
assert r.exit_code == 0
assert r.output_bytes == b'''{
"edh": "\\u00f0",
"goat": "\\ud83d\\udc10",
"snowman": "\\u2603",
"\\u00f0": "edh",
"\\u2603": "snowman",
"\\ud83d\\udc10": "goat"
}
'''
# repeated keys?
# invalid \u escape
| [
"[email protected]"
]
| |
5f7c0cdac07becdf70d55f1915794e2a91b1e177 | 8c51aff248eb6f463d62e934213660437c3a107b | /django_project/users/views.py | fe5316a90de09acd0372252d67941f52e802b19c | []
| no_license | wonjun0901/WJ_Develop_Individually | 5f839932c189adf2b2b34f7dadbdeaa8744f8d0e | e0402f5dbdda8ae8292cace124d381e29f707183 | refs/heads/master | 2021-01-02T00:13:38.851832 | 2020-02-18T01:10:15 | 2020-02-18T01:10:15 | 239,406,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 613 | py | from django.shortcuts import render, redirect
#from django.contrib.auth.forms import UserCreationForm
from django.contrib import messages
from .forms import UserRegisterForm
def register(request):
if request.method == 'POST':
form = UserRegisterForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
messages.success(request, f'Account created for {username}!')
return redirect('blog-home')
else:
form = UserRegisterForm()
return render(request, 'users/register.html', {'form': form})
| [
"[email protected]"
]
| |
e82585fce52c800d045ff51b94242a83f0126930 | 653a3d9d66f3d359083cb588fc7c9ece8bb48417 | /test/runtime/frontend_test/onnx_test/defs_test/math_test/max_test.py | 8aaf5db7ac2b8cfd0529aa58330acb3221cbd3dc | [
"Zlib",
"MIT"
]
| permissive | leonskim/webdnn | fec510254b15f3dec00f5bed8f498737b372e470 | f97c798c9a659fe953f9dc8c8537b8917e4be7a2 | refs/heads/master | 2020-04-15T18:42:43.632244 | 2019-01-10T10:07:18 | 2019-01-10T10:07:18 | 164,921,764 | 0 | 0 | NOASSERTION | 2019-01-09T19:07:35 | 2019-01-09T19:07:30 | Python | UTF-8 | Python | false | false | 1,306 | py | import numpy as np
from test.runtime.frontend_test.onnx_test.util import make_node, make_tensor_value_info, make_model
from test.util import wrap_template, generate_kernel_test_case
from webdnn.frontend.onnx import ONNXConverter
@wrap_template
def template(n_x, x_shape, description: str = ""):
vxs = [np.random.rand(*x_shape) for _ in range(n_x)]
vys = list(vxs)
while len(vys) > 1:
vx1, vx2 = vys.pop(0), vys.pop(0)
vy = np.maximum(vx1, vx2)
vys.append(vy)
vy = vys[0]
xs = [make_tensor_value_info(f"x{i}", vx.shape) for i, vx in enumerate(vxs)]
y = make_tensor_value_info("y", vy.shape)
operator = make_node("Max", [x.name for x in xs], ["y"])
model = make_model([operator], xs, [y])
graph = ONNXConverter().convert(model)
assert tuple(vy.shape) == tuple(graph.outputs[0].shape), f"vy: {vy.shape}, graph.outputs[0]: {graph.outputs[0].shape}"
generate_kernel_test_case(
description=f"[ONNX] Max {description}",
graph=graph,
inputs={graph.inputs[i]: vx for i, vx in enumerate(vxs)},
expected={graph.outputs[0]: vy},
)
def test_2():
template(n_x=2, x_shape=[2, 3, 4, 5])
def test_3():
template(n_x=3, x_shape=[2, 3, 4, 5])
def test_4():
template(n_x=4, x_shape=[2, 3, 4, 5])
| [
"[email protected]"
]
| |
ce84cfc7e6a9774842cef1a393e8ef625284ae06 | 7f189b1d917785da079276674276f68baa30df7f | /kmmall/pipelines.py | 1d30292b0e08e1835ac775ee2d640e6b34c9e8bb | []
| no_license | eninem123/kangmeimallspider | b79ed43781328d67e893652433e59ed094ec941a | b2bcca0efe0b634ca97f331242351e9cfd52c2f7 | refs/heads/master | 2022-12-24T04:31:31.169097 | 2018-09-20T09:58:41 | 2018-09-20T09:58:41 | 149,369,473 | 1 | 0 | null | 2022-12-08T00:45:48 | 2018-09-19T00:38:33 | Python | UTF-8 | Python | false | false | 2,637 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
# import json
# from scrapy.exporters import CsvItemExporter
import csv
# class KmmallPipeline(object):
# def process_item(self, item, spider):
# return item
class KmCsvPipeline(object):
def open_spider(self, spider):
        # create the output file object
self.f = open("km2.csv", "w+")
        # haven't figured out how to use this approach yet
        # self.csv_exporter = CsvItemExporter(self.f)
        # start exporting csv data
        # self.csv_exporter.start_exporting()
        # create a csv writer object used to write item data into the target file
self.csv_writer = csv.writer(self.f, delimiter=',')
def process_item(self, item, spider):
        # write the item data into the csv file via the csv writer
        # convert the item to a plain dict
        item = dict(item)
        # dump to json here instead if json output is needed
        # item = json.dumps(item, ensure_ascii=False)
# self.csv_exporter.export_item(item.encode("utf8"))
print('*******************************************************item:', item)
print('*******************************************************item:', type(item))
# print('*******************************************************item:', item['goods_url'])
        # the extracted item is a dict, shaped like {"key1": "val1", "key2": "val2", ...}
one=item['one']
two=item['two']
two_url=item['two_url']
three=item['three']
three_url=item['three_url']
title=item['title']
title_two=item['title_two']
price=item['price']
goods_url=item['goods_url']
market_price = item['market_price']
spec=item['spec']
count_comment=item['count_comment']
goods_name=item['goods_name']
goods_no=item['goods_no']
goods_pz=item['goods_pz']
goods_logo=item['goods_logo']
goods_spec=item['goods_spec']
goods_jx=item['goods_jx']
goods_cj=item['goods_cj']
self.csv_writer.writerow([one,two,two_url,three,three_url,title,title_two,price,market_price,spec, goods_url,count_comment,goods_name,goods_no,goods_pz,goods_logo,goods_spec,goods_jx,goods_cj])
return item
def close_spider(self, spider):
        # finish the csv export
# self.csv_exporter.finish_exporting()
        # close the file, flushing buffered data from memory to disk
self.f.close()
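# Per the header note, a sketch of enabling this pipeline in settings.py
# (the priority value 300 is an assumption):
#   ITEM_PIPELINES = {
#       'kmmall.pipelines.KmCsvPipeline': 300,
#   }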
| [
"[email protected]"
]
| |
70a5a2a8d97d47e4470a414ce3590f34bca83b74 | 22e076588057d200c7119f87d330678e7ed7d168 | /posts/forms.py | 8094a686d31545ae191391fd805ca09373a1291f | []
| no_license | DylanMsK/Insta_clone_project | 16088926bda8f66fe412016f1764076dd46a7629 | 7921bef90aad1128021bd4e2bb60f96fd0efab01 | refs/heads/master | 2020-05-05T13:03:29.896332 | 2019-04-18T09:01:33 | 2019-04-18T09:01:33 | 180,057,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | from django import forms
from .models import Post, Comment
class PostModelForm(forms.ModelForm):
content = forms.CharField(
label='content',
widget=forms.Textarea(
attrs={
'class': '',
'rows': 5,
'cols': 50,
'placeholder': '지금 뭘 하고 계신가요?'
})
)
class Meta:
model = Post
        # list the model fields that will receive input
fields = ['content', 'image',]
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ['comment',]
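# Minimal usage sketch (hypothetical view code, not part of this module):
#   form = PostModelForm(request.POST, request.FILES)  # FILES is needed for 'image'
#   if form.is_valid():
#       form.save()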
| [
"[email protected]"
]
| |
18e350c9f21878bc1409a1ec2b3304e103c6c660 | 528c811306faa4a34bf51fca7955b7a24ac2e30c | /Python/Triangle.py | da2d8206529278895eea530d8c2d8f3d4bc40ef4 | []
| no_license | ganjingcatherine/LeetCode-1 | 1addbd7e4d9254a146601f9d5e28b8becb8235a6 | 488782d3f1e759da2d32b4e82dbf55b96c431244 | refs/heads/master | 2021-05-11T03:15:16.810035 | 2016-02-06T06:19:18 | 2016-02-06T06:19:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,529 | py | """
Given a triangle, find the minimum path sum from top to bottom. Each step you may move to adjacent numbers on the row below.
For example, given the following triangle
[
[2],
[3,4],
[6,5,7],
[4,1,8,3]
]
The minimum path sum from top to bottom is 11 (i.e., 2 + 3 + 5 + 1 = 11).
Note:
Bonus point if you are able to do this using only O(n) extra space, where n is the total number of rows in the triangle.
"""
class Solution:
# @param triangle, a list of lists of integers
# @return an integer
def minimumTotal(self, triangle):
d = [[0 for _ in range(len(triangle[j]))] for j in range(len(triangle))]
for i in range(len(triangle)):
for j in range(len(triangle[i])):
if i == 0 and j == 0:
d[0][0] = triangle[0][0]
elif j == 0:
d[i][0] = triangle[i][0] + d[i-1][0]
elif j == len(triangle[i]) - 1:
d[i][j] = triangle[i][j] + d[i-1][j-1]
else:
d[i][j] = min(d[i-1][j-1],d[i-1][j]) + triangle[i][j]
        return min(d[len(triangle)-1])
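# O(n)-space variant for the bonus: fold the triangle bottom-up, reusing the
# last row as the DP buffer (redefines Solution, LeetCode-style).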
class Solution:
# @param triangle, a list of lists of integers
# @return an integer
def minimumTotal(self, triangle):
N = len(triangle)
d = triangle[len(triangle)-1]
for i in reversed(range(N-1)):
for j in range(i+1):
d[j] = min(d[j],d[j+1]) + triangle[i][j]
return d[0]
| [
"[email protected]"
]
| |
c0d01549392c14b63f25cf3ca994a4bb47d47047 | 770537437474c63f6878c26a10a5853a9687c649 | /Service/app/subscriber.py | fc7c5667b54bc22b29bbde8c6796ec4cd403f98a | []
| no_license | Makalolu/BRKACI-2945-CLUS | 89013da0a2c828abe43b2ab39f8bb85587c625ff | 197702202ca146e6c82cb39ad48fad8569d1393d | refs/heads/master | 2022-02-22T19:00:47.438095 | 2018-06-17T17:27:52 | 2018-06-17T17:27:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,783 | py |
import logging, sys
from .utils import (setup_logger, get_app, pretty_print, db_is_alive, init_db,
get_apic_session, get_class, subscribe,
)
# module level logging
logger = logging.getLogger(__name__)
def dns_subscriptions(db):
""" build subscription to APIC dns objects and keep consistent values in
database. On startup, simply wipe the db since we'll be pulling new
objects (and any cached entries can be considered invalid on startup)
dnsDomain
- multiple domains supported, only one is 'default'
- track 'name' and 'isDefault' (yes/no)
- only support dnsp-default
dnsProv
- multiple providers supported, only one is preferred
- track 'addr' which should be unique and 'preferred' (yes/no)
- only support dnsp-default
"""
# initialize db to clear out all existing objects
init_db()
# read initial state and insert into database
(domains, providers) = ([], [])
session = get_apic_session()
if session is None:
logger.error("unable to connect to APIC")
return
dnsDomain = get_class(session, "dnsDomain")
dnsProv = get_class(session, "dnsProv")
if dnsDomain is None or dnsProv is None:
logger.error("failed to perform dns init")
return
for obj in dnsDomain:
attr = obj[obj.keys()[0]]["attributes"]
if "name" in attr and "dn" in attr and "isDefault" in attr:
if "/dnsp-default/" in attr["dn"]:
domains.append({
"dn": attr["dn"],
"name":attr["name"],
"isDefault": True if attr["isDefault"]=="yes" else False
})
for obj in dnsProv:
attr = obj[obj.keys()[0]]["attributes"]
if "addr" in attr and "dn" in attr and "preferred" in attr:
if "/dnsp-default/" in attr["dn"]:
providers.append({
"dn": attr["dn"],
"addr":attr["addr"],
"preferred": True if attr["preferred"]=="yes" else False
})
# insert domains and providers into database
logger.debug("inserting domains: %s, and providers: %s"%(domains,providers))
db.dnsDomain.insert_many(domains)
db.dnsProv.insert_many(providers)
# setup subscriptions to interesting objects
interests = {
"dnsDomain": {"callback": handle_dns_event},
"dnsProv": {"callback": handle_dns_event},
}
subscribe(interests)
logger.error("subscription unexpectedly ended")
def handle_dns_event(event):
""" handle created, deleted, modified events for dnsProv and dnsDomain by
updating corresponding object in db.
On successful create/delete clear dnsCache
"""
if "imdata" in event and type(event["imdata"]) is list:
for obj in event["imdata"]:
cname = obj.keys()[0]
attr = obj[cname]["attributes"]
if "status" not in attr or "dn" not in attr or \
attr["status"] not in ["created","modified", "deleted"]:
logger.warn("skipping invalid event for %s: %s" % (attr,cname))
continue
if cname not in ["dnsProv", "dnsDomain"]:
logger.debug("skipping event for classname %s" % cname)
continue
db_attr = ["dn"]
if cname == "dnsDomain": db_attr+=["name", "isDefault"]
else: db_attr+=["addr", "preferred"]
# create object that will be added/deleted/updated in db
obj = {}
for a in db_attr:
if a in attr: obj[a] = attr[a]
if "isDefault" in obj:
obj["isDefault"] = True if obj["isDefault"]=="yes" else False
if "preferred" in obj:
obj["preferred"] = True if obj["preferred"]=="yes" else False
logger.debug("%s %s obj:%s" % (cname, attr["status"], obj))
if attr["status"] == "created" or attr["status"] == "modified":
ret = db[cname].update_one(
{"dn":attr["dn"]}, {"$set":obj}, upsert=True
)
logger.debug("update_one match/modify/upsert: [%s,%s,%s]" % (
ret.matched_count, ret.modified_count, ret.upserted_id))
if attr["status"] == "deleted":
ret = db[cname].delete_one({"dn":attr["dn"]})
logger.debug("delete_one deleted: %s" % ret.deleted_count)
if attr["status"] == "created" or attr["status"] == "deleted":
logger.debug("clearing dnsCache")
db["dnsCache"].drop()
if __name__ == "__main__":
# main can be used to run subscription or just to test db access
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--check_db", action="store_true", dest="check_db",
help="check for successful db connection")
args = parser.parse_args()
try:
# setup local logger along with 'app' logger
logger = setup_logger(logger, "subscriber.log", quiet=True)
setup_logger(logging.getLogger("app"), "subscriber.log", quiet=True)
# check db is alive before executing background subscriber
if not db_is_alive():
logger.error("unable to connect to db")
sys.exit(1)
if args.check_db:
# successfully checked db already
sys.exit(0)
# run subscriptions which only stop on error
app = get_app()
with app.app_context():
db = app.mongo.db
dns_subscriptions(db)
except KeyboardInterrupt as e:
print "\ngoodbye!\n"
sys.exit(1)
| [
"[email protected]"
]
| |
3396d6c761448a379ff7daa72b1bf3da2deb0c49 | 00d7e9321d418a2d9a607fb9376b862119f2bd4e | /utils/appendix_figure_renderer.py | 3b234fb896d68ad1a3003e3c74ea40a594132ff7 | [
"MIT"
]
| permissive | baluneboy/pims | 92b9b1f64ed658867186e44b92526867696e1923 | 5a07e02588b1b7c8ebf7458b10e81b8ecf84ad13 | refs/heads/master | 2021-11-16T01:55:39.223910 | 2021-08-13T15:19:48 | 2021-08-13T15:19:48 | 33,029,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,405 | py | #!/usr/bin/env python
import os
from collections import OrderedDict
from pims.files.pdfs.pdfjam import PdfjamCommand
from pims.files.pdfs.pdftk import convert_odt2pdf, PdftkCommand
from pims.files.pdfs.pdfjam import CpdfScalePageCommand, CpdfStampCommand
from pims.files.utils import listdir_filename_pattern
from appy.pod.renderer import Renderer
# FIXME see about totally avoiding ODT template and using cpdf for header and footer placeholder text, etc.
# FIXME see about why some pages (all portrait, 3-panel subplots get scaled differently)
# create PDF output file rendered from ODT template that has conditional text (appy.pod) placeholders
def render_pdf_background_from_odt_template(input_odt_template_file, header_dict, page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir):
"""create PDF output file rendered from ODT template that has conditional text (appy.pod) placeholders"""
# add specifics for this page to dict
page_dict = header_dict
page_dict['PN'] = page_num
page_dict['TN'] = total_num
page_dict['AL'] = appendix_letter
page_dict['FN'] = fig_num
page_dict['Caption'] = caption
# now page_dict contains all expected names for appy/pod template substitution
# create output filename
pagestr = '_page%03d' % page_num
tmp_name = 'appendix' + appendix_letter.upper() + pagestr + '.odt'
tmp_odt_file = os.path.join(pdf_out_dir, tmp_name)
# render odt
odt_renderer = Renderer( input_odt_template_file, page_dict, tmp_odt_file )
odt_renderer.run()
# convert to pdf
convert_odt2pdf(tmp_odt_file)
# return PDF fullfilename
return tmp_odt_file.replace('.odt', '.pdf')
# return list of PDF files for this drop number (i.e. drop dir)
def get_analysis_template_plot_pdfs(drop_num):
"""return list of PDF files for this drop number (i.e. drop dir)"""
dirpath = '/misc/yoda/www/plots/user/urban/sams_zgf_2016/publication/drop%d' % drop_num
fname_pat = 'drop.*\.pdf'
tmp_list = listdir_filename_pattern(dirpath, fname_pat)
# filter tmp_list to ignore previous run's _cpdf_ filenames
return [ x for x in tmp_list if "_cpdf_" not in x ]
# return filename of scaled plot PDF file
def scale_plot_pdf_file(plot_pdf_file, xscale=0.8, yscale=0.8):
"""return filename of scaled plot PDF file"""
cmd = CpdfScalePageCommand(plot_pdf_file, xscale=xscale, yscale=yscale)
cmd.run()
return cmd.outfile
# return filename of newly created background (header, page number, title, etc.)
def create_background_onto_file(page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir, title, subtitle, mon_year):
"""return filename of newly created background (header, page number, title, etc.)"""
# trusted template ODT file
odt_template_file = '/home/pims/Documents/appendix_plots_and_figures_template.odt'
# these dict items apply to all pages (header lines) in appendices
header_dict = {'title': title, 'subtitle': subtitle, 'mon_year': mon_year} # applies to all pages
return render_pdf_background_from_odt_template(odt_template_file, header_dict, page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir)
DROP_MAP = {
    # DROP_NUM: APPENDIX
    2: 'C',
    3: 'D',
    4: 'E',
}
def do_drop(drop_num):
    appendix_letter = DROP_MAP[drop_num]
    print 'Working on Drop %d for Appendix %s' % (drop_num, appendix_letter)
    # get pdf files for this drop
    drop_files = get_analysis_template_plot_pdfs(drop_num)
    return drop_files
def three_appendices_at_once():
    FIRST_ATP_APP_PAGE_NUM = 27 # 1st page num from Word document's 1st "Analysis Template..." appendix
    INTERIM_PAGES_ADDED = 2 # one each for Drop 3's and Drop 4's first (non-fig) page
    NUM_PAGES_AFTER_LAST_ATP_PAGE = 3 # how many pages in Word doc come after last "Analysis Template..." appendix page
# these dict items apply to all pages (header lines) in appendices
title = 'Analysis of SAMS Measurements on M-Vehicle in Zero Gravity Research Facility for Characterization Drops from January 20 to February 3, 2016'
subtitle = 'SAMS-DOC-013'
mon_year = 'September, 2016'
# get list of analysis template plot PDFs
pdf_files = []
for drop_num in [2, 3, 4]:
drop_files = get_analysis_template_plot_pdfs(drop_num)
pdf_files.extend(drop_files)
#print pdf_files[0:3]
#print pdf_files[-2:]
#raise SystemExit
# get list of captions; FIXME as this naively assumes one-to-one match with pdf_files gotten above
captions_file = '/misc/yoda/www/plots/user/urban/sams_zgf_2016/publication/captions_for_analysis_template_plots.txt'
with open(captions_file) as f:
caption_lines = f.readlines()
captions = [x.strip('\n') for x in caption_lines]
if len(pdf_files) != len(captions):
raise Exception('Abort: the number of PDFs found does not match the number of captions.')
total_num = FIRST_ATP_APP_PAGE_NUM + len(captions) + INTERIM_PAGES_ADDED + NUM_PAGES_AFTER_LAST_ATP_PAGE
top_offset = 99 # used by CpdfStampCommand to offset scaled plot from top of page during stamping
# for each plot PDF file, scale it and stamp on background PDF with header, page number, page total, etc.
count = 0
page_num = FIRST_ATP_APP_PAGE_NUM
old_drop = None
for tup in zip(pdf_files, captions):
pdf_file = tup[0]
caption = tup[1]
count += 1
drop_num = int(os.path.basename(pdf_file)[4])
appendix_letter = DROP_MAP[drop_num]
# FIXME what is better, pythonic way to get page_num reset for each new appendix
if old_drop and drop_num != old_drop:
page_num += 1 # advance to allow for first Appendix (non-fig) page
count = 1 # reset count within appendix
# scale plot PDF (portrait) file and return scaled filename
scaled_plot_pdf_file = scale_plot_pdf_file(pdf_file)
# specifics for this page
fig_num = count
page_num += 1
# FIXME use temp files and do clean-up of those
# FIXME see about quieting down the logging output crap
pdf_out_dir = '/tmp'
onto_file = create_background_onto_file(page_num, total_num, appendix_letter, fig_num, caption, pdf_out_dir, title, subtitle, mon_year)
cmd = CpdfStampCommand(scaled_plot_pdf_file, top_offset, onto_file)
cmd.run()
print 'p.%02d Fig.%s-%02d %s\n%s' % (page_num, appendix_letter, count, os.path.basename(pdf_file), cmd.outfile)
old_drop = drop_num
print 'NOW DO pdfjoin appendix*_cpdf_stamp-on_99.pdf IN /tmp DIR'
def one_appendix(drop_num, appendix_letter, fig1_page_num, total_doc_pages, title, subtitle, mon_year):
# get list of analysis template plot PDFs
pdf_files = get_analysis_template_plot_pdfs(drop_num)
print 'Found %d PDF files for Drop %d in Appendix %s' % (len(pdf_files), drop_num, appendix_letter)
# get list of captions; FIXME as this naively assumes one-to-one match with pdf_files gotten above
captions_file = '/misc/yoda/www/plots/user/urban/sams_zgf_2016/publication/captions_for_analysis_template_plots_appendix_%s.txt' % appendix_letter.lower()
with open(captions_file) as f:
caption_lines = f.readlines()
captions = [x.strip('\n') for x in caption_lines]
if len(pdf_files) != len(captions):
raise Exception('Abort: the number of PDFs found does not match the number of captions.')
top_offset = 99 # used by CpdfStampCommand to offset scaled plot from top of page during stamping
# for each plot PDF file, scale it and stamp on background PDF with header, page number, page total, etc.
fig_num = 1
page_num = fig1_page_num
for tup in zip(pdf_files, captions):
pdf_file = tup[0]
caption = tup[1]
# scale plot PDF (portrait) file and return scaled filename
scaled_plot_pdf_file = scale_plot_pdf_file(pdf_file)
# FIXME use temp files and do clean-up of those
# FIXME see about quieting down the logging output crap
pdf_out_dir = '/tmp'
onto_file = create_background_onto_file(page_num, total_doc_pages, appendix_letter, fig_num, caption, pdf_out_dir, title, subtitle, mon_year)
cmd = CpdfStampCommand(scaled_plot_pdf_file, top_offset, onto_file)
cmd.run()
print 'p.%02d Fig.%s-%02d %s\n%s' % (page_num, appendix_letter, fig_num, os.path.basename(pdf_file), cmd.outfile)
fig_num += 1
page_num += 1
print 'IN /tmp DIR, NOW DO FOLLOWING:'
print 'pdfjoin appendixC_*_cpdf_stamp-on_99.pdf -o /tmp/appendixC.pdf'
print 'pdfjoin appendixD_*_cpdf_stamp-on_99.pdf -o /tmp/appendixD.pdf'
print 'pdfjoin appendixE_*_cpdf_stamp-on_99.pdf -o /tmp/appendixE.pdf'
print 'THEN, THIS:'
print '/usr/bin/gs -o gs-repaired-appendixC.pdf -dPDFSETTINGS=/prepress -sDEVICE=pdfwrite appendixC.pdf'
print '/usr/bin/gs -o gs-repaired-appendixD.pdf -dPDFSETTINGS=/prepress -sDEVICE=pdfwrite appendixD.pdf'
print '/usr/bin/gs -o gs-repaired-appendixE.pdf -dPDFSETTINGS=/prepress -sDEVICE=pdfwrite appendixE.pdf'
if __name__ == "__main__":
#three_appendices_at_once()
# FIXME -- modify these (maybe just mon_year and total_doc_pages)
# these dict items apply to all pages (header lines) in all appendices
title = 'Analysis of SAMS Measurements on M-Vehicle in Zero Gravity Research Facility for Characterization Drops from January 20 to February 3, 2016'
subtitle = 'SAMS-DOC-013'
mon_year = 'February, 2017'
total_doc_pages = 119
# FIXME -- modify these (in Word, try Ctrl+G and go to page nums shown and verify those are first fig pages)
# Check your Word doc to see how these value should be set:
drop_info = {
# DROP APPENDIX FIRST_FIG_PAGE_NUM
2: ('C', 28),
3: ('D', 57),
4: ('E', 86),
}
for drop_num, tup in drop_info.iteritems():
appendix_letter, fig1_page_num = tup[0], tup[1]
one_appendix(drop_num, appendix_letter, fig1_page_num, total_doc_pages, title, subtitle, mon_year)
print drop_num, appendix_letter, fig1_page_num, "done" | [
"[email protected]"
]
| |
ac914612ce2117c1765de9ca6321750ef3079aef | 838b09e0e25280cccf754e788af8febbfb1275f7 | /app/__init__.py | f460a89951f37c5aa6fabc5ec077d2699df1e3c1 | []
| no_license | Pincenti/fakebook-march | acf1dcb0cb16770353026f3e7f112709cda8bc5e | 46be080bcc26ea661817bcb295804ff443a02b6d | refs/heads/main | 2023-04-09T05:06:16.000529 | 2021-04-23T19:02:10 | 2021-04-23T19:02:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | from flask import Flask
app = Flask(__name__)
from .import routes, models | [
"[email protected]"
]
| |
b2a671dfca5e7fc447b993c10a529875dc54603f | c7061fb106b801c12fb40ff331d927a5bb24da80 | /BasicExerciseAndKnowledge/w3cschool/n16_format_datetime.py | b0f4e62a46fdd8c480a544be789ecdafb00a1d3a | [
"MIT"
]
| permissive | Jonathan1214/learn-python | 34e6b5612beeb1a46b5964b0a4e306656355fe84 | 19d0299b30e953069f19402bff5c464c4d5580be | refs/heads/master | 2020-03-27T09:03:16.785034 | 2018-08-31T02:48:34 | 2018-08-31T02:48:34 | 146,310,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | #coding:utf-8
# Task: print dates in the specified format
import time
import datetime
# goal: get familiar with the time and datetime modules
print time.ctime() # localtime
print time.asctime(time.localtime())
print time.asctime(time.gmtime()) # gmt
print datetime.datetime(2018, 8, 12)
# print datetime.tzinfo
print datetime.date.today()
print datetime.date.fromtimestamp.__doc__ | [
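# A strftime-based line for the "specified format" part of the task (this
# particular format string is just an example):
print datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')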
"[email protected]"
]
| |
49f0bec871aede1626dd9b0823050f24018b7413 | c703b8ac3b5545857f6c95efa2d61eaf7a664021 | /iPERCore/tools/human_digitalizer/deformers/__init__.py | e0d6c7b177b6946f7ec4806e5c0de347eece34a1 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license",
"Apache-2.0",
"BSD-2-Clause"
]
| permissive | iPERDance/iPERCore | d29681d229b3098b3517b1abf4f7ea65f579de73 | fcf9a18ffd66bf3fdd3eea4153a3bc4785131848 | refs/heads/main | 2023-07-30T15:04:15.835396 | 2023-04-12T14:21:23 | 2023-04-12T14:21:23 | 313,664,064 | 2,520 | 339 | Apache-2.0 | 2023-05-12T03:26:52 | 2020-11-17T15:36:25 | Python | UTF-8 | Python | false | false | 1,117 | py | # Copyright (c) 2020-2021 impersonator.org authors (Wen Liu and Zhixin Piao). All rights reserved.
import torch
from .sil_deformer import SilhouetteDeformer
from .clothlinks_deformer import ClothSmplLinkDeformer
def run_sil2smpl_offsets(obs_sils, init_smpls, image_size, device=torch.device("cuda:0"),
visualizer=None, visual_poses=None):
"""
Args:
obs_sils (np.ndarray):
init_smpls (np.ndarray):
image_size (int):
device (torch.device):
visualizer (None or Visualizer):
visual_poses (None or np.ndarray):
Returns:
"""
# 1. define Deformer Solver
deform_solver = SilhouetteDeformer(image_size=image_size, device=device)
# 2. format inputs for SilhouetteDeformer.solve()
cam = init_smpls[:, 0:3]
pose = init_smpls[:, 3:-10]
shape = init_smpls[:, -10:]
obs = {
"sil": obs_sils,
"cam": cam,
"pose": pose,
"shape": shape
}
# 3. solve the offsets
offsets = deform_solver.solve(obs, visualizer, visual_poses).cpu().detach().numpy()
return offsets
| [
"[email protected]"
]
| |
46f4c190ec307f397e873c46ac6abca7c00b6cba | e616ea35ead674ebb4e67cae54768aaaeb7d89c9 | /project/alma/disciplines/migrations/0001_initial.py | cd2f83a805a6561b60a83706fe7cba9576acbc37 | []
| no_license | VWApplications/VWAlmaAPI | 12bb1888533cf987739b0e069737afa6337141e1 | 3a8009b17518384c269dfee3c8fe44cbe2567cc0 | refs/heads/master | 2022-04-02T10:26:49.832202 | 2020-02-12T04:46:31 | 2020-02-12T04:46:31 | 161,098,215 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,490 | py | # Generated by Django 2.1.4 on 2019-09-21 20:17
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import re
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Discipline',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(help_text='Title of discipline', max_length=100, verbose_name='Title')),
('institution', models.CharField(help_text='University or School in which the user is inserted.', max_length=100, verbose_name='Institution')),
('course', models.CharField(help_text='Course that is ministered the discipline', max_length=100, verbose_name='Course')),
('description', models.TextField(help_text='Description of discipline', verbose_name='Description')),
('classroom', models.CharField(default='Class A', help_text='Classroom title of discipline.', max_length=10, validators=[django.core.validators.RegexValidator(re.compile('^Class|^Turma [A-Z]$'), "Enter a valid classroom, the classroom need to be 'Class A-Z'")], verbose_name='Classroom')),
('password', models.CharField(blank=True, help_text='Password to get into the class.', max_length=30, verbose_name='Password')),
('students_limit', models.PositiveIntegerField(default=0, help_text='Students limit to get in the class.', validators=[django.core.validators.MaxValueValidator(60, 'There can be no more than %(limit_value)s students in the class.'), django.core.validators.MinValueValidator(5, 'Must have at least %(limit_value)s students in class.')], verbose_name='Students limit')),
('monitors_limit', models.PositiveIntegerField(default=0, help_text='Monitors limit to insert in the class.', validators=[django.core.validators.MaxValueValidator(5, 'There can be no more than %(limit_value)s monitors in the class.'), django.core.validators.MinValueValidator(0, 'Ensure this value is greater than or equal to %(limit_value)s.')], verbose_name='Monitors limit')),
('is_closed', models.BooleanField(default=False, help_text='Close discipline.', verbose_name='Is closed?')),
('created_at', models.DateTimeField(auto_now_add=True, help_text='Date that the discipline is created.', verbose_name='Created at')),
('updated_at', models.DateTimeField(auto_now=True, help_text='Date that the discipline is updated.', verbose_name='Updated at')),
('monitors', models.ManyToManyField(blank=True, related_name='monitor_classes', to=settings.AUTH_USER_MODEL, verbose_name='Monitors')),
('students', models.ManyToManyField(blank=True, related_name='student_classes', to=settings.AUTH_USER_MODEL, verbose_name='Students')),
('teacher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='disciplines', related_query_name='discipline', to=settings.AUTH_USER_MODEL, verbose_name='Teacher')),
],
options={
'verbose_name': 'Discipline',
'verbose_name_plural': 'Disciplines',
'ordering': ['title', 'created_at'],
},
),
]
| [
"[email protected]"
]
| |
322b370d6d03d1c9bfafe46a87d7b9c8a55eaae6 | ce0f8956c4c308c67bd700d31fe8d5a17b16ac08 | /Python3/src/23 Miscellaneous Topics/PDF Manipulation/02_createWatermark.py | b055027b0fc51eb03efc7fb7e50e4af5484af4a7 | []
| no_license | seddon-software/python3 | 795ae8d22a172eea074b71d6cd49d79e388d8cc6 | d5e6db1509a25c1a3040d5ae82d757539a2ff730 | refs/heads/master | 2021-07-10T15:48:31.893757 | 2020-07-16T20:29:22 | 2020-07-16T20:29:22 | 175,872,757 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 554 | py | from reportlab.pdfgen import canvas
point = 10
inch = 72
TEXT = "watermark"
def make_pdf_file(output_filename):
title = output_filename
h = 8.5 * inch
v = 11 * inch
grey = 0.9
c = canvas.Canvas(output_filename, pagesize=(h, v))
c.setStrokeColorRGB(0,0,0)
c.setFillColorRGB(grey, grey, grey)
c.setFont("Helvetica", 12 * point)
c.rotate(45)
c.translate(h/2, 0)
c.drawString(-h/8, 0, TEXT )
c.showPage()
c.save()
filename = "pdfs/watermark.pdf"
make_pdf_file(filename)
print(("Wrote", filename)) | [
"[email protected]"
]
| |
dd1d15c77bbed78ecbb276388312c71711b89b76 | 20bb1ae805cd796a7c377e55966633441d1d9fd5 | /CodeForces/Problems/887B Cubes for Masha/cubes.py | 7e511bea378f4a51b6295ec6b24c35eb89ef6910 | []
| no_license | nathantheinventor/solved-problems | 1791c9588aefe2ebdc9293eb3d58317346d88e83 | c738e203fa77ae931b0ec613e5a00f9a8f7ff845 | refs/heads/master | 2022-10-27T08:58:23.860159 | 2022-10-13T20:18:43 | 2022-10-13T20:18:43 | 122,110,149 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 972 | py | cubes = [input().split() for _ in range(int(input()))]
def canMake(s):
if len(s) == 1:
for cube in cubes:
if s in cube:
return True
return False
elif len(s) == 2:
for i, cube1 in enumerate(cubes):
if s[0] in cube1:
for j, cube2 in enumerate(cubes):
if i != j and s[1] in cube2:
return True
return False
elif len(s) == 3:
for i, cube1 in enumerate(cubes):
if s[0] in cube1:
for j, cube2 in enumerate(cubes):
if i != j and s[1] in cube2:
for k, cube3 in enumerate(cubes):
if i != k and j != k and s[2] in cube3:
return True
return False
if not canMake("1"):
print(0)
else:
for i in range(1, 1000):
if not canMake(str(i)):
print(i - 1)
break | [
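# Hypothetical sanity check: with a single cube whose faces are 1 2 3 4 5 6,
# every number in 1..6 can be shown but 7 cannot, so the program prints 6.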
"[email protected]"
]
| |
564e7ae6d142c78bcd5de942b9a6a69facdfb9d0 | 8e0cdf235cd82e422c62fee3e6d044e4f4ee7614 | /feedback/migrations/0004_remove_translation.py | f0d087297b5ccce43ab6fa90c2ef41ed6fab4ac5 | [
"BSD-3-Clause"
]
| permissive | stevecassidy/signbank-feedback | 4ae1c58a95a27428d11ef4a692c52738e9a4fb6f | d4cb8a7f445ca42c90a69d565d43875f50251aa8 | refs/heads/master | 2022-01-14T05:07:31.474605 | 2022-01-12T04:18:42 | 2022-01-12T04:18:42 | 78,930,468 | 1 | 1 | null | 2017-01-14T09:10:43 | 2017-01-14T09:10:43 | null | UTF-8 | Python | false | false | 407 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-17 13:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('feedback', '0003_map_translation'),
]
operations = [
migrations.RemoveField(
model_name='signfeedback',
name='translation',
),
]
| [
"[email protected]"
]
| |
50972c24f80116bd960f7350abeb6b01cde72fdf | 4b7806dd0ea8f7eb54bec25eb5afcdfdc02d91cf | /NEURON/izhiGUI.py | 7b966f77d36f4b185374e9a3d30eb777ae9d16a8 | []
| permissive | OpenSourceBrain/IzhikevichModel | ab6018e8392b073d17cb4e29c68108a4397f098a | 83fe93ea390bb240f31e7352f6a4ad744dec43ca | refs/heads/master | 2023-08-31T00:01:19.985460 | 2023-08-18T15:13:31 | 2023-08-18T15:13:31 | 4,956,319 | 23 | 10 | BSD-3-Clause | 2023-09-04T11:06:46 | 2012-07-09T10:41:31 | Jupyter Notebook | UTF-8 | Python | false | false | 15,133 | py | """
izh.py
Python/NEURON GUI for the different celltypes of Izhikevich neuron (versions from 2 publications).
* 2003 Izhikevich artificial neuron model from
EM Izhikevich "Simple Model of Spiking Neurons"
IEEE Transactions On Neural Networks, Vol. 14, No. 6, November 2003 pp 1569-1572
* 2007 Izhikevich artificial neuron model from
EM Izhikevich (2007) "Dynamical systems in neuroscience" MIT Press
Cell types available from Izhikevich, 2007 book:
1. RS - Layer 5 regular spiking pyramidal cell (fig 8.12 from 2007 book)
2. IB - Layer 5 intrinsically bursting cell (fig 8.19 from 2007 book)
3. CH - Cat primary visual cortex chattering cell (fig8.23 from 2007 book)
4. LTS - Rat barrel cortex Low-threshold spiking interneuron (fig8.25 from 2007 book)
5. FS - Rat visual cortex layer 5 fast-spiking interneuron (fig8.27 from 2007 book)
6. TC - Cat dorsal LGN thalamocortical (TC) cell (fig8.31 from 2007 book)
7. RTN - Rat reticular thalamic nucleus (RTN) cell (fig8.32 from 2007 book)
Implementation by: Salvador Dura-Bernal, Cliff Kerr, Bill Lytton
([email protected]; [email protected]; [email protected])
"""
# adapted from /u/billl/nrniv/sync/izh.hoc
import os, sys, collections
import numpy as np
from neuron import h, gui
h.load_file('stdrun.hoc')
import izhi2007Figs as iz07fig
import izhi2007Wrapper as izh07
import __main__
py = __main__
h.tstop=500
h.cvode_active(0)
h.dt=0.1
izh, cell07 = None, None # must be declared here since needs to be imported elsewhere
type2003 = collections.OrderedDict([
# a b c d vviv tstop
('regular spiking (RS)' , (0.02 , 0.2 , -65.0 , 8.0 , -63.0 , 150.0)) ,
('intrinsically bursting (IB)' , (0.02 , 0.2 , -55.0 , 4.0 , -70.0 , 150.0)) ,
('chattering (CH)' , (0.02 , 0.2 , -50.0 , 2.0 , -70.0 , 150.0)) ,
('fast spiking (FS)' , (0.1 , 0.2 , -65.0 , 2.0 , -70.0 , 150.0)) ,
('thalamo-cortical (TC)' , (0.02 , 0.25, -65.0 , 0.05 , -63.0 , 150.0)) ,
('thalamo-cortical burst (TC)' , (0.02 , 0.25, -65.0 , 0.05 , -87.0 , 150.0)) ,
('resonator (RZ)' , (0.1 , 0.26 , -65.0 , 2.0 , -70.0 , 100.0)) ,
('low-threshold spiking (LTS)' , (0.02 , 0.25 , -65.0 , 2.0 , -63.0 , 250.0))])
type2004 = collections.OrderedDict([
# a b c d vviv tstop
('tonic spiking' , (0.02 , 0.2 , -65.0 , 6.0 , -70.0 , 100.0)) ,
('mixed mode' , (0.02 , 0.2 , -55.0 , 4.0 , -70.0 , 160.0)) ,
('spike latency' , (0.02 , 0.2 , -65.0 , 6.0 , -70.0 , 100.0)) ,
('rebound spike' , (0.03 , 0.25 , -60.0 , 4.0 , -64.0 , 200.0)) ,
('Depolarizing afterpotential' , (1.0 , 0.2 , -60.0 , -21.0 , -70.0 , 50.0)) ,
('phasic spiking' , (0.02 , 0.25 , -65.0 , 6.0 , -64.0 , 200.0)) ,
('spike frequency adaptation' , (0.01 , 0.2 , -65.0 , 8.0 , -70.0 , 85.0)) ,
('subthreshold oscillations' , (0.05 , 0.26 , -60.0 , 0.0 , -62.0 , 200.0)) ,
('rebound burst' , (0.03 , 0.25 , -52.0 , 0.0 , -64.0 , 200.0)) ,
('accomodation' , (0.02 , 1.0 , -55.0 , 4.0 , -65.0 , 400.0)) ,
('tonic bursting' , (0.02 , 0.2 , -50.0 , 2.0 , -70.0 , 220.0)) ,
('Class 1' , (0.02 , -0.1 , -55.0 , 6.0 , -60.0 , 300.0)) ,
('resonator' , (0.1 , 0.26 , -60.0 , -1.0 , -62.0 , 400.0)) ,
('threshold variability' , (0.03 , 0.25 , -60.0 , 4.0 , -64.0 , 100.0)) ,
('inhibition-induced spiking' , (-0.02 , -1.0 , -60.0 , 8.0 , -63.8 , 350.0)) ,
('phasic bursting' , (0.02 , 0.25 , -55.0 , 0.05 , -64.0 , 200.0)) ,
('Class 2' , (0.2 , 0.26 , -65.0 , 0.0 , -64.0 , 300.0)) ,
('integrator' , (0.02 , -0.1 , -55.0 , 6.0 , -60.0 , 100.0)) ,
('bistability' , (0.1 , 0.26 , -60.0 , 0.0 , -61.0 , 300.0)) ,
('inhibition-induced bursting' , (-0.026 , -1.0 , -45.0 , -2.0 , -63.8 , 350.0))])
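# A minimal standalone forward-Euler sketch of the 2003 model, independent of
# NEURON (the 0.04/5/140 coefficients and the 30 mV peak are from the published
# model; the defaults below are the RS row of type2003; the function name and
# step count are illustrative only):
def _iz2003_demo (a=0.02, b=0.2, c=-65.0, d=8.0, Iin=14.0, dt=0.1, nstep=1500) :
    v = -63.0          # vviv for RS
    u = b*v            # same initialization as uvvset() in cellset()
    spikes = []
    for i in range(nstep):
        v += dt*(0.04*v*v + 5*v + 140 - u + Iin)
        u += dt*a*(b*v - u)
        if v >= 30.0:  # spike peak reached: reset v and bump u
            spikes.append(i*dt)
            v = c
            u += d
    return spikes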
choices = collections.OrderedDict([
('2003 PP model' , (lambda: h.Izhi2003a(0.5,sec=cell03), lambda: izh._ref_V, type2003)),
('2003 Sec model', (lambda: h.Izhi2003b(0.5,sec=cell03), lambda: cell03(0.5)._ref_v, type2003)),
('2004 PP model' , (lambda: h.Izhi2003a(0.5,sec=cell03), lambda: izh._ref_V, type2004)),
('2004 Sec model', (lambda: h.Izhi2003b(0.5,sec=cell03), lambda: cell03(0.5)._ref_v, type2004)),
('2007 PP model' , (lambda: izh07.IzhiCell(host=izh07.dummy), lambda: izh._ref_V, izh07.type2007)),
('2007 Sec model' , (lambda: izh07.IzhiCell(), lambda: cell07.sec(0.5)._ref_v, izh07.type2007))])
ch=choices.keys()
def newmodel (ty=None) :
"2003,2004 was the orig model; 2007 is the redesign; look at global izhtype if no "
return izhtype.find('2007') > -1 if ty is None else ty.find('2007') > -1
#* setup the cell
izhtype='2004 PP model'
def cellset ():
global cell07, cell03, izh, vref, uvvset, fih, izhtype
if newmodel():
cell07 = choices[izhtype][0]()
izh = cell07.izh
def uvvset () : pass
else:
cell03 = h.Section(name="cell2003") # this cell will be used for 2003/4; different cell created in izhi2007Wrapper for those
izh = choices[izhtype][0]()
def uvvset () : vref[0], izh.u = vviv, vviv*izh.b
cell03.L, cell03.diam = 6.37, 5 # empirically tuned -- cell size only used for Izh1
fih = [h.FInitializeHandler(uvvset), h.FInitializeHandler(0,Isend)]
vref = choices[izhtype][1]() # can define this afterwards even though used in uvvset above
# h('objref izh'); h.izh = izh # if need to access from hoc
#* parameters for different cell types
playvec, playtvec = [h.Vector() for x in range(2)]
# initialization routines
name, params = None, None
def p (nm, pm=None) :
global name, vviv, params, vvset
if pm is None : pm = choices[izhtype][2][nm]
name, params = nm, pm
if newmodel():
izh.C, izh.k, izh.vr, izh.vt, izh.vpeak, izh.a, izh.b, izh.c, izh.d, izh.celltype = params
h.tstop=1000
else:
izh.a, izh.b, izh.c, izh.d, vviv, h.tstop = params
g.size(0,h.tstop,-100,50)
try:
if newmodel():
graphx() # interviews graphics
iz07fig.recorder(cell07, choices[izhtype][1]()) # vectors to draw under matplotlib
iz07fig.test1(cell07, nm, izhtype)
else:
iz07fig.closeFig()
graphx()
playinit()
h.run()
except: print sys.exc_info()[0],' :',sys.exc_info()[1]
def ivwrap (func, label=''):
wrapper = h.VBox()
wrapper.intercept(1)
func()
wrapper.intercept(0)
wrapper.map(label)
return wrapper
def graphx ():
g.erase_all()
g.addvar("v", choices[izhtype][1](), 2,2)
g.addvar("u", izh._ref_u, 3,1)
g.addvar("Iin", izh._ref_Iin if newmodel() else izh._ref_Iin, 4,2)
try: g.addvar("gsyn", izh._ref_gsyn, 1, 1)
except: pass
I0=I1=T1=0
def playinit () :
global I0,I1,T1
try: izh.f, izh.g= 5, 140 # standard params: V'=0.04*V^2 + 5*V + 140 - u + Iin
except: pass
bub.label[0] = '%s'%(name)
if name=='Depolarizing afterpotential': bub.label[0] = "%s -- REPEATED SPIKING"%(bub.label[0])
if name=='accomodation': bub.label[0] = "%s -- NOT IMPLEMENTED (different functional form;see izh.mod)"%(bub.label[0])
if name=='inhibition-induced bursting': bub.label[0] = "%s -- NOT IMPLEMENTED (convergence problems)"%(bub.label[0])
g.label(0.1,0.9,bub.label[0])
print bub.label[0]
playvec.play_remove()
playtvec.resize(0); playvec.resize(0)
if name=='Class 1' :
T1=30
playtvec.append(0,T1,h.tstop)
playvec.append(0,0,0.075*(h.tstop-T1))
elif name=='Class 2' : # (H) Class 2 exc.
T1=30
playtvec.append(0,T1,h.tstop)
playvec.append(-0.5, -0.5,-0.05+0.015*(h.tstop-T1))
elif name=='accomodation' : # (R) accomodation
playtvec.append(0, 200, 200.001, 300, 312.5, 312.501, h.tstop)
playvec.append( 0, 200/25, 0 , 0 , 4 , 0 , 0)
if name in ['Class 1', 'Class 2', 'accomodation'] : playvec.play(izh._ref_Iin, playtvec, 1)
if name in ['Class 1', 'integrator'] :
try: izh.f, izh.g = 4.1, 108 # don't exist in all the models
except: pass
def synon () :
"Turn on a synapse"
global ns, nc
ns = h.NetStim()
nc = h.NetCon(ns,izh,0,1,10)
ns.start, ns.interval, ns.number = 10, 10, 10
nc.weight[0] = 2
izh.taug = 3
#* box of buttons
class Bubox :
def __init__ (self, type, li) :
self.izhtype = type
vbox, hbox, hbox1 = h.VBox(), h.HBox(), h.HBox()
self.vbox = vbox
lil = len(li)
self.cols, self.rows = {20:(4,5), 8:(4,2), 9:(3,3)}[lil]
self.label=h.ref('================================================================================')
vbox.intercept(1)
h.xpanel("")
h.xvarlabel(self.label)
if newmodel(self.izhtype):
h.xlabel("V' = (k*(V-vr)*(V-vt) - u + Iin)/C if (V>vpeak) V=c [reset]")
h.xlabel("u' = a*(b*(V-vr) - u) if (V>vpeak) u=u+d")
else:
h.xlabel("v' = 0.04*v*v + f*v + g - u + Iin; if (v>thresh) v=c [reset]")
h.xlabel("u' = a*(b*v - u); if (v>thresh) u=u+d")
h.xpanel()
hbox1.intercept(1)
h.xpanel(""); h.xbutton("RUN",h.run); h.xpanel()
self.xvalue('I0','I0')
self.xvalue('I1','I1')
self.xvalue('T1','T1')
hbox1.intercept(0); hbox1.map("")
hbox.intercept(1)
for ii,(k,v) in enumerate(li.iteritems()):
if ii%self.rows==0: h.xpanel("")
h.xbutton(k, (lambda f, arg1, arg2: lambda: f(arg1,arg2))(p, k, v)) # alternative is to use functools.partial
if ii%self.rows==self.rows-1: h.xpanel()
hbox.intercept(0); hbox.map("")
vbox.intercept(0); vbox.map("Spike patterns")
self.label[0]=""
def pr (): pass
def xvalue (self,name,var,obj=py,runner=pr):
h.xpanel("")
h.xvalue(name,(obj, var),0,runner)
h.xpanel()
def xpvalue (self,name,ptr,runner=pr):
"Doesn't work currently"
h.xpanel("")
h.xpvalue(name,ptr,1,runner)
h.xpanel()
def transpose (self,x) : return int(x/self.rows) + x%self.rows*self.cols
# end class Bubox
# current injections for specific models
def Isend () :
global T1,I0,I1
if I0!=0 or I1!=0:
Iin = I0
Isend1(T1,I1)
return
T1=h.tstop/10
if not newmodel(): izh.Iin=0
if name=='tonic spiking': # (A) tonic spiking
Isend1(T1,14)
elif name=='phasic spiking': # (B) phasic spiking
T1=20
Isend1(T1,0.5)
elif name=='tonic bursting': # (C) tonic bursting
T1=22
Isend1(T1,15)
elif name=='phasic bursting': # (D) phasic bursting
T1=20
Isend1(T1,0.6)
elif name=='mixed mode': # (E) mixed mode
Isend1(T1,10)
elif name=='spike frequency adaptation': # (F) spike freq. adapt
Isend1(T1,30)
elif name=='Class 1': # (G) Class 1 exc. -- playvec
pass
elif name=='Class 2': # (H) Class 2 exc. -- playvec
pass
elif name=='spike latency': # (izh.Iin) spike latency
Isend1(T1,7.04)
Isend1(T1+3,0.0)
elif name=='subthreshold oscillations': # (J) subthresh. osc.
Isend1(T1,2)
Isend1(T1+5,0)
elif name=='resonator': # (K) resonator
T2, T3 = T1+20, 0.7*h.tstop
T4 = T3+40
Isend1(T1,0.65) ; Isend1(T2,0.65) ; Isend1(T3,0.65) ; Isend1(T4,0.65)
Isend1(T1+4,0.) ; Isend1(T2+4,0.) ; Isend1(T3+4,0.) ; Isend1(T4+4,0.)
elif name=='integrator': # (L) integrator
T1, T3 = h.tstop/11, 0.7*h.tstop
T2, T4 = T1+5, T3+10
Isend1(T1,9) ; Isend1(T2,9) ; Isend1(T3,9) ; Isend1(T4,9)
Isend1(T1+2,0.) ; Isend1(T2+2,0.) ; Isend1(T3+2,0.) ; Isend1(T4+4,0.)
elif name=='rebound spike': # (M) rebound spike
T1=20
Isend1(T1,-15)
Isend1(T1+5,0)
elif name=='rebound burst': # (N) rebound burst
T1=20
Isend1(T1,-15)
Isend1(T1+5,0)
elif name=='threshold variability': # (O) thresh. variability
T1, T2, T3 =10, 70, 80
Isend1(T1,1) ; Isend1(T2,-6) ; Isend1(T3,1)
Isend1(T1+5,0.) ; Isend1(T2+5,0.) ; Isend1(T3+5,0.)
elif name=='bistability': # (P) bistability
T1, T2, izh.Iin = h.tstop/8, 216, 0.24
Isend1(T1,1.24) ; Isend1(T2,1.24)
Isend1(T1+5,0.24); Isend1(T2+5,0.24)
elif name=='Depolarizing afterpotential': # (Q) DAP depolarizing afterpotential
T1 = 10
Isend1(T1-1,20)
Isend1(T1+1,0)
elif name=='accomodation': # (R) accomodation -- playvec
pass
elif name=='inhibition-induced spiking': # (S) inhibition induced spiking
izh.Iin=80
Isend1(50,75)
Isend1(250,80)
elif name=='inhibition-induced bursting': # (T) inhibition induced bursting
izh.Iin=80
Isend1(50,80) # Isend1(50,75) -- will crash simulator
Isend1(250,80)
elif name=='regular spiking (RS)': # regular spiking (RS)
Isend1(T1,14)
elif name=='intrinsically bursting (IB)': # intrinsically bursting (IB)
Isend1(T1,11)
elif name=='chattering (CH)': # chattering (CH)
Isend1(T1,10)
elif name=='fast spiking (FS)': # fast spiking (FS)
Isend1(T1,10)
elif name=='thalamo-cortical (TC)': # thalamo-cortical (TC)
Isend1(2*T1,1.5)
elif name=='thalamo-cortical burst (TC)': # thalamo-cortical burst (TC)
Isend1(0,-25)
Isend1(3*T1,0)
elif name=='resonator (RZ)': # resonator (RZ)
Isend1(0,-2)
Isend1(T1,-0.5)
Isend1(T1+50,10)
Isend1(T1+55,-0.5)
elif name=='low-threshold spiking (LTS)': # low-threshold spiking (LTS)
Isend1(T1,10)
elif name == 'TC_burst': # thalamo-cortical burst (TC) (2007)
Isend1(0,-1200)
Isend1(120,110)
elif name == 'RTN_burst': # reticular thalamic nucleus burst (TC) (2007)
Isend1(0,-350)
Isend1(120,90)
def Isend1 (tm, Iin) :
def my_event():
izh.Iin = Iin
h.CVode().re_init()
h.cvode.event(tm, my_event)
# izhstim() sets up a single stim into izh cell
# effect easily seen by running "Class 1"
def izhstim () :
stim=h.NetStim(0.5)
stim.number = stim.start = 1
nc = h.NetCon(stim,izh)
nc.delay = 2
nc.weight = 0.1
izh.erev = -5
#* plotting & printing
g, nmenu, bub = None, None, None
def isinstanceh (objref,objtype) : return objref.hname().startswith(objtype.hname()[:-2])
def winup (izht=izhtype):
global bub, g, nmenu, izhtype
izhtype = izht # swap in the new one
cellset()
if g is None:
g=h.Graph(0)
h.graphList[0].append(g)
if g.view_count()<1:
g.view(-0.1*h.tstop,-90,1.2*h.tstop,150,300,200,400,200)
g.size(0,h.tstop,-80,40)
if not bub is None: bub.vbox.unmap()
bub = Bubox(izhtype,choices[izhtype][2])
bub.label[0] = izhtype
if not nmenu is None: nmenu.unmap()
nmenu = ivwrap(lambda: h.nrnpointmenu(izh), izh.hname())
def chwin ():
"Launch windows from model list"
h.xpanel("Izhikevich models")
# outer lambda returns inner lambda so as to pass arg to winup() -- the innermost routine
for c in ch:
h.xbutton(c, (lambda f, arg1: lambda: f(arg1))(winup,c))
h.xpanel()
def vtvec(vv): return np.linspace(0, len(vv)*h.dt, len(vv), endpoint=True)
if __name__ == '__main__': chwin()
| [
"[email protected]"
]
| |
69cc105ffb1b88b37b4962ce32f29a3d2366625d | 1af1f89eb9a178b95d1ba023b209b7538fb151f0 | /Algorithms/498. Diagonal Traverse.py | a78694dcbb277726c2c4bc88dabf90747eadcb45 | []
| no_license | 0xtinyuk/LeetCode | 77d690161cc52738e63a4c4b6595a6012fa5c21e | 08bc96a0fc2b672282cda348c833c02218c356f1 | refs/heads/master | 2023-02-21T16:58:39.881908 | 2021-01-25T08:00:13 | 2021-01-25T08:00:13 | 292,037,842 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 712 | py | class Solution:
def findDiagonalOrder(self, matrix: List[List[int]]) -> List[int]:
sx = 0
sy = 0
m = len(matrix)
if m==0:
return []
n = len(matrix[0])
if n==0:
return []
ans = []
reverse = False
while sx<m and sy<n:
x=sx
y=sy
temp = []
while x>=0 and y<n:
temp.append(matrix[x][y])
x-=1
y+=1
if reverse:
temp.reverse()
reverse = not reverse
ans = ans + temp
if (sx==m-1):
sy+=1
else:
sx+=1
return ans | [
"[email protected]"
]
| |
67142483d36d0db80900abc7955171ba9822c98b | 68cd659b44f57adf266dd37789bd1da31f61670d | /swea/덧셈.py | e5b8ab0ac443bc65fe5936e0ac9141aab0492675 | []
| no_license | 01090841589/solved_problem | c0c6f5a46e4d48860dccb3b0288aa5b56868fbca | bbea2f31e5fe36cad100bc514eacd83545fb25b1 | refs/heads/master | 2023-07-02T23:55:51.631478 | 2021-08-04T13:57:00 | 2021-08-04T13:57:00 | 197,157,830 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py |
def summ(k, scr):
if scr == num:
result = []
for j in range(N):
if visited[j]:
result.append(j+1)
print(result)
return
if scr > num:
return
if k >= N:
return
visited[k] = arr[k]
summ(k+1, scr+arr[k])
visited[k] = 0
summ(k+1, scr)
arr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
N = len(arr)
num = 10
visited = [0] * N
summ(0, 0) | [
"[email protected]"
]
| |
3b522ad5c1bc3e9b2c00cb9dae382a3145c20fd4 | 7cd8ee14711eaf33cee0d9e06e78a974fc579242 | /PIFramework/juicer/spiders/desk_customer_browse.py | e02c7f424af19bcbefa4456451ba138e83a60a4e | []
| no_license | Chandler-Song/pi | c618117dfdd9a7496a57c69f029851e94787f591 | aebc6d65b79ed43c66e7e1bf16d6d9f31b470372 | refs/heads/master | 2022-03-13T02:44:30.452673 | 2019-02-19T09:38:45 | 2019-02-19T09:38:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,849 | py | from juicer.utils import *
from w3lib.http import basic_auth_header
class deskcustomerbrowse(JuicerSpider):
name = "desk_customer_browse"
start_urls = ('https://www.desk.com/',)
def __init__(self, *args, **kwargs):
super(deskcustomerbrowse, self).__init__(*args, **kwargs)
self.auth = basic_auth_header('[email protected]', 'Welcome@123')
self.main_url = 'https://sathyamcinemas.desk.com'
self.headers = {
'Accept': 'application/json',
'Content-Type': 'application/json',
'Authorization': self.auth
}
self.conn = MySQLdb.connect(user="root", host = "localhost", db="DESKCASES", passwd='root', use_unicode=True)
self.cur = self.conn.cursor()
self.conn.set_character_set('utf8')
self.cur.execute('SET NAMES utf8;')
self.cur.execute('SET CHARACTER SET utf8;')
self.cur.execute('SET character_set_connection=utf8;')
get_query_param = "select case_customer_url from desk_cases where case_customer_url not in (select customer_link from desk_customer) order by rand() limit 50000"
self.cur.execute(get_query_param)
self.profiles_list = [i for i in self.cur.fetchall()]
self.customer_insert = "INSERT INTO desk_customer(customer_link, customer_id, customer_company_link, customer_twitter_user, customer_access_company_cases, customer_access_private_portal, customer_addresses, customer_avatar, customer_background, customer_company, customer_company_name, customer_created_at, customer_custom_fields, customer_display_name, customer_emails, customer_external_id, customer_first_name, customer_label_ids, customer_language, customer_last_name, customer_locked_until, customer_phone_numbers, customer_title, customer_uid, customer_updated_at, created_at, modified_at, last_seen ) values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, now(), now(), now()) on duplicate key update modified_at = now(), customer_link=%s, customer_id=%s, customer_company_link=%s, customer_twitter_user=%s, customer_access_company_cases=%s, customer_access_private_portal=%s, customer_addresses=%s, customer_avatar=%s, customer_background=%s, customer_company=%s, customer_company_name=%s, customer_created_at=%s, customer_custom_fields=%s, customer_display_name=%s, customer_emails=%s, customer_external_id=%s, customer_first_name=%s, customer_label_ids=%s, customer_language=%s, customer_last_name=%s, customer_locked_until=%s, customer_phone_numbers=%s, customer_title=%s, customer_uid=%s, customer_updated_at=%s"
def __del__(self):
self.conn.close()
self.cur.close()
def parse(self, response):
sel = Selector(response)
if self.profiles_list:
for cus in self.profiles_list:
yield Request(cus[0], callback=self.parse_customer, headers = self.headers, meta = {"customer_link": cus[0]})
def parse_customer(self, response):
customer_links = response.meta.get('customer_link', '')
output = response.body
output = json.loads(output.strip('\n'))
total_entries = output.get('_embedded', {}).get('entries', [])
if not total_entries:
if isinstance(output, dict):
toal_en = []
toal_en.append(output)
total_entries = toal_en
for ttl_en in total_entries:
company_links = ttl_en.get('_links', {}).get('company', {})
if company_links:
company_links = company_links.get('href', '')
twitter_user = ttl_en.get('_links', {}).get('twitter_user', {})
if twitter_user:
twitter_user = twitter_user.get('href', '')
if company_links:
company_links = "%s%s" %(self.main_url, company_links)
if twitter_user:
twitter_user = "%s%s" %(self.main_url, twitter_user)
access_company_cases = ttl_en.get('access_company_cases', '')
access_private_portal = ttl_en.get('access_private_portal', '')
addresses = '<>'.join(ttl_en.get('addresses', []))
avatar = ttl_en.get('avatar', '')
background = ttl_en.get('background', '')
company = ttl_en.get('company', '')
company_name = ttl_en.get('company_name', '')
created_at = ttl_en.get('created_at', '')
custom_fields = ttl_en.get('custom_fields', {})
if not custom_fields:
custom_fields = ''
else:
custom_fields = json.dumps(custom_fields)
display_name = ttl_en.get('display_name', '')
emails = ttl_en.get('emails', [])
if emails:
emails = '<>'.join(["%s%s%s" % (te.get('type'), ':-', te.get('value')) for te in emails])
else:
emails = ''
external_id = ttl_en.get('external_id', '')
first_name = ttl_en.get('first_name', '')
id_ = str(ttl_en.get('id', ''))
label_ids = '<>'.join([str(ld) for ld in ttl_en.get('label_ids', [])])
language = ttl_en.get('language', '')
last_name = ttl_en.get('last_name', '')
locked_until = ttl_en.get('locked_until', '')
try:
phone_numbers_dict = ttl_en.get('phone_numbers', [])
phone_numbers = phone_numbers_dict[0]['value']
except:
phone_numbers = ''
title = ttl_en.get('title', '')
uid = ttl_en.get('uid', '')
updated_at = ttl_en.get('updated_at', '')
values = (customer_links, id_, company_links, twitter_user, access_company_cases, access_private_portal, addresses, avatar, background, company, company_name, created_at, custom_fields, display_name, emails, external_id, first_name, label_ids, language, last_name, locked_until, phone_numbers, title, uid, updated_at, customer_links, id_, company_links, twitter_user, access_company_cases, access_private_portal, addresses, avatar, background, company, company_name, created_at, custom_fields, display_name, emails, external_id, first_name, label_ids, language, last_name, locked_until, phone_numbers, title, uid, updated_at)
self.cur.execute(self.customer_insert, values)
| [
"[email protected]"
]
| |
df6d16af59ecc459d304d7406ac8442ed9b48f06 | 26771494974942f4ab18d2cd8247506c344e1d14 | /117-populatingNextRightPointersinEachNodeII.py | 9b9e705cd655c6bfec49ca57ca65aa58890158d4 | []
| no_license | wangyunpengbio/LeetCode | 9f4c6076e067c5e847d662679483f737d40e8ca5 | cec1fd11fe43177abb2d4236782c0f116e6e8bce | refs/heads/master | 2020-04-29T22:28:25.899420 | 2020-04-03T07:37:26 | 2020-04-03T07:37:26 | 176,448,957 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,312 | py | """
# Definition for a Node.
class Node:
def __init__(self, val, left, right, next):
self.val = val
self.left = left
self.right = right
self.next = next
"""
class Solution:
def connect(self, root: 'Node') -> 'Node':
if root == None:
return None
queue = [(1,root)]
lastLevel = 1
fillLevelQueue = []
while len(queue) != 0:
level,item = queue.pop(0)
            if level == lastLevel + 1: # once a whole level is buffered, link its nodes, then clear the buffer
nodeNum = len(fillLevelQueue)
fillLevelQueue.append(None)
for i in range(nodeNum):
fillLevelQueue[i].next = fillLevelQueue[i+1]
# print("line"+str(i))
lastLevel = lastLevel + 1
fillLevelQueue = []
            if item is None: # skip null nodes, whether they appear mid-level or at the end of a level
                continue
            fillLevelQueue.append(item) # buffer each visited node of the current level
# print(item.val)
queue.append((level + 1,item.left))
queue.append((level + 1,item.right))
return root
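# Note: this is a plain level-order BFS using O(level width) extra space; the
# follow-up asks for O(1) space by walking each level via the next pointers
# already built on the level above.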
| [
"[email protected]"
]
| |
472e2678e5a33ac3ef0f0b99023128e995f69fe6 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/cv/detection/DBpp_ID4145_for_PyTorch/mmocr/apis/train.py | dd009ec1a6e189694585385e994c91ebbb28894e | [
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 11,120 | py | # -*- coding: utf-8 -*-
# BSD 3-Clause License
#
# Copyright (c) 2017
# All rights reserved.
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ==========================================================================
# Copyright (c) OpenMMLab. All rights reserved.
import warnings
import mmcv
import numpy as np
import torch
import torch.distributed as dist
from torch_npu.contrib.module.deform_conv import ModulatedDeformConv
try:
import apex
from apex import amp
except ImportError:
print("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.")
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.device.npu import NPUDataParallel, NPUDistributedDataParallel
from mmcv.runner import (DistSamplerSeedHook, EpochBasedRunner,
Fp16OptimizerHook, OptimizerHook, build_optimizer,
build_runner, get_dist_info)
from mmcv.ops.modulated_deform_conv import ModulatedDeformConv2dPack
from mmdet.core import DistEvalHook, EvalHook
from mmdet.datasets import build_dataloader, build_dataset
from mmocr import digit_version
from mmocr.apis.utils import (disable_text_recog_aug_test,
replace_image_to_tensor)
from mmocr.utils import get_root_logger
class ApexOptimizerHook(OptimizerHook):
def after_train_iter(self, runner):
runner.optimizer.zero_grad()
if self.detect_anomalous_params:
self.detect_anomalous_parameters(runner.outputs['loss'], runner)
with amp.scale_loss(runner.outputs['loss'], runner.optimizer) as scaled_loss:
scaled_loss.backward()
if self.grad_clip is not None:
grad_norm = self.clip_grads(runner.model.parameters())
if grad_norm is not None:
# Add grad norm to the logger
runner.log_buffer.update({'grad_norm': float(grad_norm)},
runner.outputs['num_samples'])
runner.optimizer.step()
def replace_layers(model):
for n, m in model.named_children():
if len(list(m.children())) > 0:
## compound module, go inside it
replace_layers(m)
if isinstance(m, ModulatedDeformConv2dPack):
## simple module
new = ModulatedDeformConv(m.in_channels, m.out_channels, m.kernel_size,
m.stride[0] if isinstance(m.stride, tuple) else m.stride,
m.padding[0] if isinstance(m.padding, tuple) else m.padding,
m.dilation[0] if isinstance(m.dilation, tuple) else m.dilation,
m.groups, m.deform_groups, m.bias)
try:
n = int(n)
model[n] = new
except:
setattr(model, n, new)
def train_detector(model,
dataset,
cfg,
distributed=False,
validate=False,
timestamp=None,
meta=None):
logger = get_root_logger(cfg.log_level)
# prepare data loaders
dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset]
# step 1: give default values and override (if exist) from cfg.data
default_loader_cfg = {
**dict(
num_gpus=len(cfg.gpu_ids),
dist=distributed,
seed=cfg.get('seed'),
drop_last=False,
pin_memory=True,
persistent_workers=False),
**({} if torch.__version__ != 'parrots' else dict(
prefetch_num=2,
)),
}
# update overall dataloader(for train, val and test) setting
default_loader_cfg.update({
k: v
for k, v in cfg.data.items() if k not in [
'train', 'val', 'test', 'train_dataloader', 'val_dataloader',
'test_dataloader'
]
})
# step 2: cfg.data.train_dataloader has highest priority
train_loader_cfg = dict(default_loader_cfg,
**cfg.data.get('train_dataloader', {}))
data_loaders = [build_dataloader(ds, **train_loader_cfg) for ds in dataset]
replace_layers(model)
# put model on gpus
if distributed:
find_unused_parameters = cfg.get('find_unused_parameters', False)
# Sets the `find_unused_parameters` parameter in
# torch.nn.parallel.DistributedDataParallel
if torch.npu.is_available():
model = NPUDistributedDataParallel(
model.npu(),
device_ids=[torch.npu.current_device()],
broadcast_buffers=False,
find_unused_parameters=find_unused_parameters)
else:
model = MMDistributedDataParallel(
model.cuda(),
device_ids=[torch.cuda.current_device()],
broadcast_buffers=False,
find_unused_parameters=find_unused_parameters)
else:
if not torch.cuda.is_available():
assert digit_version(mmcv.__version__) >= digit_version('1.4.4'), \
'Please use MMCV >= 1.4.4 for CPU training!'
if torch.npu.is_available():
model = NPUDataParallel(model.npu(), device_ids=cfg.gpu_ids)
else:
model = MMDataParallel(model, device_ids=cfg.gpu_ids)
# build runner
if torch.npu.is_available():
optimizer = apex.optimizers.NpuFusedSGD(model.module.parameters(),
lr=cfg.optimizer['lr'],
momentum=cfg.optimizer['momentum'],
weight_decay=cfg.optimizer['weight_decay'])
model.module, optimizer = amp.initialize(model.module, optimizer,
opt_level='O1', loss_scale=32768,
combine_grad=True)
else:
optimizer = build_optimizer(model, cfg.optimizer)
if 'runner' not in cfg:
cfg.runner = {
'type': 'EpochBasedRunner',
'max_epochs': cfg.total_epochs
}
warnings.warn(
'config is now expected to have a `runner` section, '
'please set `runner` in your config.', UserWarning)
else:
if 'total_epochs' in cfg:
assert cfg.total_epochs == cfg.runner.max_epochs
runner = build_runner(
cfg.runner,
default_args=dict(
model=model,
optimizer=optimizer,
work_dir=cfg.work_dir,
logger=logger,
meta=meta))
# an ugly workaround to make .log and .log.json filenames the same
runner.timestamp = timestamp
# fp16 setting
fp16_cfg = cfg.get('fp16', None)
if fp16_cfg is not None:
optimizer_config = Fp16OptimizerHook(
**cfg.optimizer_config, **fp16_cfg, distributed=distributed)
else:
optimizer_config = ApexOptimizerHook(**cfg.optimizer_config)
# register hooks
runner.register_training_hooks(
cfg.lr_config,
optimizer_config,
cfg.checkpoint_config,
cfg.log_config,
cfg.get('momentum_config', None),
custom_hooks_config=cfg.get('custom_hooks', None))
if distributed:
if isinstance(runner, EpochBasedRunner):
runner.register_hook(DistSamplerSeedHook())
# register eval hooks
if validate:
val_samples_per_gpu = (cfg.data.get('val_dataloader', {})).get(
'samples_per_gpu', cfg.data.get('samples_per_gpu', 1))
if val_samples_per_gpu > 1:
# Support batch_size > 1 in test for text recognition
# by disable MultiRotateAugOCR since it is useless for most case
cfg = disable_text_recog_aug_test(cfg)
cfg = replace_image_to_tensor(cfg)
val_dataset = build_dataset(cfg.data.val, dict(test_mode=True))
val_loader_cfg = {
**default_loader_cfg,
**dict(shuffle=False, drop_last=False),
**cfg.data.get('val_dataloader', {}),
**dict(samples_per_gpu=val_samples_per_gpu)
}
val_dataloader = build_dataloader(val_dataset, **val_loader_cfg)
eval_cfg = cfg.get('evaluation', {})
eval_cfg['by_epoch'] = cfg.runner['type'] != 'IterBasedRunner'
eval_hook = DistEvalHook if distributed else EvalHook
runner.register_hook(eval_hook(val_dataloader, **eval_cfg))
if cfg.resume_from:
runner.resume(cfg.resume_from)
elif cfg.load_from:
runner.load_checkpoint(cfg.load_from)
runner.run(data_loaders, cfg.workflow)
def init_random_seed(seed=None, device='cuda'):
"""Initialize random seed. If the seed is None, it will be replaced by a
random number, and then broadcasted to all processes.
Args:
seed (int, Optional): The seed.
device (str): The device where the seed will be put on.
Returns:
int: Seed to be used.
"""
if seed is not None:
return seed
# Make sure all ranks share the same random seed to prevent
# some potential bugs. Please refer to
# https://github.com/open-mmlab/mmdetection/issues/6339
rank, world_size = get_dist_info()
seed = np.random.randint(2**31)
if world_size == 1:
return seed
if rank == 0:
random_num = torch.tensor(seed, dtype=torch.int32, device=device)
else:
random_num = torch.tensor(0, dtype=torch.int32, device=device)
dist.broadcast(random_num, src=0)
return random_num.item()
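
# Usage sketch (illustrative only, not part of the original file): derive a
# single shared seed once, before datasets and model weights are initialized.
#
#     seed = init_random_seed(args.seed, device='npu')
#     # then seed python/numpy/torch RNGs (e.g. via mmcv's set_random_seed)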
| [
"[email protected]"
]
| |
c37e90516146a963e73064dbae83398fa95b20e3 | 1d48ddd72477de7d9ad98eef61bdfb406859b31c | /04. asyncio/web_scraping/test_pg.py | 31752e74e37bf55d125a66ca1feeb9777c26d7ae | []
| no_license | alexshchegretsov/async_techniques | b68d27de58bc2393520eb080838b2c72d356d2f3 | 42118504a39ccbd0bebad4ed41eba4b5c2e3d5dd | refs/heads/master | 2020-12-04T06:40:34.712114 | 2020-01-06T20:59:58 | 2020-01-06T20:59:58 | 231,661,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | # -*- coding: utf-8 -*-
import asyncio
import asyncpg
async def run():
# conn = await asyncpg.connect(user="async", password="Dexter89!", database="async_db", host="127.0.0.1", port="5432")
conn = await asyncpg.connect("postgresql://async:Dexter89!@localhost/async_db")
values = await conn.fetch("""select * from talks_headers""")
await conn.close()
print(values, len(values))
if __name__ == '__main__':
asyncio.run(run())
| [
"[email protected]"
]
| |
6c1bf8a8173f069af524c50af7366e3150d5b5a6 | 5adb0e3bce712efb68b241421cd12e71d0401d98 | /tasks/ehco.py | acf54d41b71c3edc251e2ea7866628ff0119bf2b | [
"MIT"
]
| permissive | librestar/backend | 8e945c3953ec59b4717704a5ebfc613ed756cba1 | 9060453d140d4c1785b370fd548be519d04047d4 | refs/heads/main | 2023-02-11T03:36:33.584588 | 2021-01-14T07:34:08 | 2021-01-14T07:34:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,372 | py | import json
import typing as t
import ansible_runner
from uuid import uuid4
from app.db.session import SessionLocal
from app.db.models.port import Port
from app.db.models.user import User
from app.db.models.server import Server
from app.db.models.port_forward import PortForwardRule
from app.db.crud.server import get_server
from app.db.crud.port import get_port
from tasks import celery_app
from tasks.utils.runner import run_async
from tasks.utils.handlers import iptables_finished_handler, status_handler
@celery_app.task()
def ehco_runner(
port_id: int,
server_id: int,
port_num: int,
args: str = None,
remote_ip: str = None,
update_status: bool = False,
**kwargs,
):
server = get_server(SessionLocal(), server_id)
extravars = {
"host": server.ansible_name,
"local_port": port_num,
"remote_ip": remote_ip,
"ehco_args": args,
"update_status": update_status,
"update_ehco": update_status and not server.config.get('ehco'),
}
r = run_async(
server=server,
playbook="ehco.yml",
extravars=extravars,
status_handler=lambda s, **k: status_handler(port_id, s, update_status),
finished_callback=iptables_finished_handler(server, port_id, True)
if update_status
else lambda r: None,
)
return r[1].config.artifact_dir
| [
"[email protected]"
]
| |
63406186486569e40cecf5de8a6cae1dc00ae400 | f54070cd3048a3645cb25f301592a904d387a1c9 | /python_prgrams/testpython/class.py | d90814e0b94bcc93934d6f3342591b4b93ec4eaa | []
| no_license | mak705/Python_interview | 02bded60417f1e6e2d81e1f6cde6961d95da2a8e | aff2d6018fd539dbcde9e3a6b3f8a69167ffca0d | refs/heads/master | 2020-03-22T21:03:34.018919 | 2019-11-15T08:51:34 | 2019-11-15T08:51:34 | 140,653,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | class PartyAnimal:
    x = 0
    def party(self):
        self.x = self.x + 1
        print "so far ", self.x
an = PartyAnimal()
an.party()
an.party()
an.party()
print "Type", type(an)
print "Dir", dir(an)
#PartyAnimal.party(an)
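# Note: an.party() is shorthand for PartyAnimal.party(an) -- the instance is
# passed in as self. The first "self.x = self.x + 1" also creates an instance
# attribute x that shadows the class attribute from then on.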
| [
"[email protected]"
]
| |
dace21adfb00aaf1f2863a3e40f9256a2a67b538 | 2d6d24c0bfee13fc4682dee52075e78a552a8d1c | /tests/io/test_scanners.py | 88b4c30ae125ae42fe97d5aa7678fd851b13a7be | [
"MIT"
]
| permissive | sbiradarctr/pyTenable | b890875c5df3a1da676cebd57af51bc49666a7d2 | 2a6930cd7b29036780c291581d89ab33c0fd6679 | refs/heads/master | 2023-05-06T09:20:43.580412 | 2021-05-31T09:05:11 | 2021-05-31T09:05:11 | 371,701,521 | 0 | 0 | MIT | 2021-05-28T12:58:52 | 2021-05-28T12:58:52 | null | UTF-8 | Python | false | false | 7,003 | py | from tenable.errors import *
from ..checker import check, single
import uuid, pytest
@pytest.mark.vcr()
def test_scanner_control_scans_scanner_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.control_scan('nope', str(uuid.uuid4()), 'stop')
@pytest.mark.vcr()
def test_scanner_control_scans_scan_uuid_typeerror(api):
with pytest.raises(TypeError):
api.scanners.control_scan(1,1,'stop')
@pytest.mark.vcr()
def test_scanner_control_scans_action_typeerror(api):
with pytest.raises(TypeError):
api.scanners.control_scan(1,str(uuid.uuid4()), 1)
@pytest.mark.vcr()
def test_scanner_control_scans_action_unexpectedvalue(api):
with pytest.raises(UnexpectedValueError):
api.scanners.control_scan(1, str(uuid.uuid4()), 'nope')
@pytest.mark.vcr()
def test_scanner_control_scans_notfounderror(api):
with pytest.raises(NotFoundError):
api.scanners.control_scan(1,
'c5e3e4c9-ee47-4fbc-9e1d-d6f39801f56c', 'stop')
@pytest.mark.vcr()
def test_scanner_control_scans_permissionerror(stdapi):
with pytest.raises(PermissionError):
stdapi.scanners.control_scan(1,
'c5e3e4c9-ee47-4fbc-9e1d-d6f39801f56c', 'stop')
@pytest.mark.vcr()
def test_scanner_delete_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.delete('nope')
@pytest.mark.vcr()
def test_scanner_delete_notfound(api):
with pytest.raises(NotFoundError):
api.scanners.delete(1)
@pytest.mark.vcr()
def test_scanner_delete_permissionerror(stdapi, scanner):
with pytest.raises(PermissionError):
stdapi.scanners.delete(scanner['id'])
@pytest.mark.skip(reason="We don't want to actually delete scanners.")
def test_scanner_delete(api, scanner):
api.scanners.delete(scanner['id'])
@pytest.mark.vcr()
def test_scanner_details_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.details('nope')
@pytest.mark.vcr()
def test_scanner_details_notfounderror(api):
with pytest.raises(NotFoundError):
api.scanners.details(1)
@pytest.mark.vcr()
def test_scanner_details_permissionerror(stdapi, scanner):
with pytest.raises(PermissionError):
stdapi.scanners.details(scanner['id'])
@pytest.mark.vcr()
def test_scanner_details(api, scanner):
s = api.scanners.details(scanner['id'])
check(s, 'id', int)
check(s, 'uuid', 'scanner-uuid')
check(s, 'name', str)
check(s, 'type', str)
check(s, 'status', str)
check(s, 'scan_count', int)
check(s, 'engine_version', str)
check(s, 'platform', str)
check(s, 'loaded_plugin_set', str)
check(s, 'owner', str)
check(s, 'pool', bool)
@pytest.mark.vcr()
def test_scanner_edit_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.edit('nope')
@pytest.mark.vcr()
def test_sanner_edit_plugin_update_typeerror(api, scanner):
with pytest.raises(TypeError):
api.scanners.edit(scanner['id'], force_plugin_update='yup')
@pytest.mark.vcr()
def test_scanner_edit_ui_update_typeerror(api, scanner):
with pytest.raises(TypeError):
api.scanners.edit(scanner['id'], force_ui_update='yup')
@pytest.mark.vcr()
def test_scanner_edit_finish_update_typeerror(api, scanner):
with pytest.raises(TypeError):
api.scanners.edit(scanner['id'], finish_update='yup')
@pytest.mark.vcr()
def test_scanner_edit_registration_code_typeerror(api, scanner):
with pytest.raises(TypeError):
api.scanners.edit(scanner['id'], registration_code=False)
@pytest.mark.vcr()
def test_scanner_edit_aws_update_typeerror(api, scanner):
with pytest.raises(TypeError):
api.scanners.edit(scanner['id'], aws_update_interval='nope')
@pytest.mark.vcr()
@pytest.mark.xfail(raises=PermissionError)
def test_scanner_edit_notfounderror(api):
with pytest.raises(NotFoundError):
api.scanners.edit(1, force_ui_update=True)
@pytest.mark.vcr()
def test_scanner_edit_permissionerror(stdapi, scanner):
with pytest.raises(PermissionError):
stdapi.scanners.edit(scanner['id'], force_ui_update=True)
@pytest.mark.vcr()
@pytest.mark.xfail(raises=PermissionError)
def test_scanner_edit(api, scanner):
api.scanners.edit(scanner['id'], force_plugin_update=True)
@pytest.mark.vcr()
def test_scanner_get_aws_targets_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.get_aws_targets('nope')
@pytest.mark.vcr()
def test_scanner_get_aws_targets_notfounderror(api):
with pytest.raises(NotFoundError):
api.scanners.get_aws_targets(1)
@pytest.mark.vcr()
@pytest.mark.xfail(raises=NotFoundError)
def test_scanner_get_aws_targets_permissionerror(stdapi):
with pytest.raises(PermissionError):
stdapi.scanners.get_aws_targets(1)
@pytest.mark.skip(reason="No AWS Environment to test against.")
@pytest.mark.vcr()
def test_scanner_get_aws_targets(api, scanner):
pass
@pytest.mark.vcr()
def test_scanner_key_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.get_scanner_key('nope')
@pytest.mark.vcr()
def test_scanner_key(api, scanner):
assert isinstance(api.scanners.get_scanner_key(scanner['id']), str)
@pytest.mark.vcr()
def test_get_scans_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.get_scans('nope')
@pytest.mark.vcr()
def test_get_scans_notfounderror(api):
with pytest.raises(NotFoundError):
api.scanners.get_scans(1)
@pytest.mark.vcr()
def test_get_scans_permissionerror(stdapi, scanner):
with pytest.raises(PermissionError):
stdapi.scanners.get_scans(scanner['id'])
@pytest.mark.vcr()
def test_get_scans(api, scanner):
assert isinstance(api.scanners.get_scans(scanner['id']), list)
@pytest.mark.vcr()
def test_list_scanners_permissionerror(stdapi):
with pytest.raises(PermissionError):
stdapi.scanners.list()
@pytest.mark.vcr()
def test_list_scanners(api):
assert isinstance(api.scanners.list(), list)
@pytest.mark.vcr()
def test_link_state_id_typeerror(api):
with pytest.raises(TypeError):
api.scanners.toggle_link_state('nope', True)
@pytest.mark.vcr()
def test_link_state_linked_typeerror(api):
with pytest.raises(TypeError):
api.scanners.toggle_link_state(1, 'nope')
@pytest.mark.vcr()
def test_link_state_permissionerror(stdapi, scanner):
with pytest.raises(PermissionError):
stdapi.scanners.toggle_link_state(scanner['id'], True)
@pytest.mark.vcr()
def test_link_state(api, scanner):
api.scanners.toggle_link_state(scanner['id'], True)
@pytest.mark.vcr()
def test_scanners_get_permissions(api, scanner):
perms = api.scanners.get_permissions(scanner['id'])
assert isinstance(perms, list)
for p in perms:
check(p, 'type', str)
check(p, 'permissions', int)
@pytest.mark.vcr()
def test_scanner_edit_permissions(api, scanner, user):
api.scanners.edit_permissions(scanner['id'],
{'type': 'default', 'permissions': 16},
{'type': 'user', 'id': user['id'], 'permissions': 16}) | [
"[email protected]"
]
| |
ab6a077030d7e71350326b60b2622c761eac3670 | ca539b0df7ca5a91f80b2e2f64e7379e69243298 | /87.py | 219641b62a1f8827bc7e6a09e66208ccf7bb59c1 | []
| no_license | yorick76ee/leetcode | 9a9e5d696f3e32d9854c2ed9804bd0f98b03c228 | d9880892fe15f9bb2916beed3abb654869945468 | refs/heads/master | 2020-03-18T22:59:29.687669 | 2016-07-18T19:56:55 | 2016-07-18T19:56:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,452 | py | class Solution(object):
def lettercount(self,s1,s2):
dict1,dict2={},{}
for i in range(len(s1)):
if s1[i] not in dict1:
dict1[s1[i]] = 1
else:
dict1[s1[i]] += 1
if s2[i] not in dict2:
dict2[s2[i]] = 1
else:
dict2[s2[i]] += 1
for i in range(len(s1)):
char = s1[i]
try:
if dict1[char] != dict2[char]:
return False
except:
return False
return True
def recursive(self,s1,s2):
length = len(s1)
if length == 1 or s1 == s2:
return s1 == s2
if not self.lettercount(s1,s2):
return False
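        # Case 1: split s1 and s2 at the same index i, halves not swapped.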
for i in range(1,length):
s1_one = s1[:i]
s2_one = s2[:i]
s1_two = s1[i:]
s2_two = s2[i:]
one_flag,two_flag = False,False
if (s1_one,s2_one) in self.dp:
one_flag = self.dp[(s1_one,s2_one)]
else:
one_flag = self.recursive(s1_one,s2_one)
if (s1_two,s2_two) in self.dp:
two_flag = self.dp[(s1_two,s2_two)]
else:
two_flag = self.recursive(s1_two,s2_two)
if one_flag and two_flag:
self.dp[(s1,s2)] = True
return True
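        # Case 2: split where s2's two halves are swapped relative to s1's.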
for i in range(1,length):
s1_one = s1[:i]
s2_one = s2[length-i:]
s1_two = s1[i:]
s2_two = s2[:length-i]
one_flag,two_flag = False,False
if (s1_one,s2_one) in self.dp:
one_flag = self.dp[(s1_one,s2_one)]
else:
one_flag = self.recursive(s1_one,s2_one)
if (s1_two,s2_two) in self.dp:
two_flag = self.dp[(s1_two,s2_two)]
else:
two_flag = self.recursive(s1_two,s2_two)
if one_flag and two_flag:
self.dp[(s1,s2)] = True
return True
self.dp[(s1,s2)] = False
return False
def isScramble(self, s1, s2):
"""
:type s1: str
:type s2: str
:rtype: bool
"""
self.dp = {}
return self.recursive(s1,s2)
if __name__ == '__main__':
wds= Solution()
print wds.isScramble('oatzzffqpnwcxhejzjsnpmkmzngneo','acegneonzmkmpnsjzjhxwnpqffzzto')
| [
"[email protected]"
]
| |
365f848ad8dde1db19f683afd8439f0362e34fb7 | e3a674666de18e3b722bfd36e54d6a32e3f0b726 | /html/default.py | 6971548d1f71ed3f49da66c818ddae27850fbfbf | []
| no_license | sauloaldocker/lamp | 92d52c3105cd1d00d816138a64de66643fda67c3 | 9088f899e9a4e7e04941518041e10630cfdf71f1 | refs/heads/master | 2021-01-20T04:36:21.783064 | 2017-04-02T13:22:02 | 2017-04-02T13:22:02 | 21,629,683 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-# enable debugging
import cgitb
import os
import sys
cgitb.enable()
print "Content-Type: text/html;charset=utf-8"
print
print "<h1>argv</h1>"
print "<table>"
for k in sys.argv:
print "<tr><td>%s</td></tr>" % (k)
print "</table>"
print "<h1>environ</h1>"
print "<table>"
for k in os.environ:
print "<tr><td><b>%s</b></td><td>%s</td></tr>" % (k, os.environ[k])
print "</table>"
print "<h1>path</h1>"
print "<table>"
for k in sys.path:
print "<tr><td>%s</td></tr>" % (k)
print "</table>"
| [
"[email protected]"
]
| |
cd30dee9c2e39d4d74f5da68dd97c87656ac6d03 | ecd27923efba50703a7bfbfa2ba37a8cc78560ea | /automatic_scraper/config/bid/liriqing/shandong_taian_ggzy_config.py | bd234c5293803ff68ced61e5c97669fc19eb8d3a | []
| no_license | yougecn/work | fb691b072a736731083777e489712dee199e6c75 | 1b58525e5ee8a3bdecca87fdee35a80e93d89856 | refs/heads/master | 2022-03-03T19:14:17.234929 | 2018-04-17T12:29:19 | 2018-04-17T12:29:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,631 | py | # coding: utf-8
import time
import logging
import re
logger = logging.getLogger(__name__)
author = "liriqing"
web_title = u"泰安市公共资源交易网"
data_source = 'http://www.taggzyjy.com.cn'
start_urls = [
##政府
#招标
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002001/075002001001/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002001/075002001004/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002001/075002001005/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002001/075002001006/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002001/075002001007/",
#中标
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002002/075002002001/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002002/075002002004/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002002/075002002005/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002002/075002002006/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002002/075002002007/",
#更正
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002003/075002003001/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002003/075002003004/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002003/075002003005/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002003/075002003006/",
"http://www.taggzyjy.com.cn/Front/jyxx/075002/075002003/075002003007/"
]
db_config = {
'host': '127.0.0.1',
'port': 3306,
'user': 'root',
'password': 'asd123',
'database': 'bid_data',
'table': 'zhaotoubiao'
}
# List-page extraction template
index_pattern = {
"_list": {'pattern': "//tr[@height='30']", 'type': 'xpath', 'target': 'html', 'custom_func_name': ''},
"_next_page": {'pattern': "//td[text() = '下页 >' and @onclick]", 'type': 'xpath', 'target': 'html', 'custom_func_name': ''},
"title": {'pattern': "//a[@target='_blank']", 'type': 'xpath', 'target': 'text', 'custom_func_name': ''},
"issue_time": {'pattern': "//td[@width='80']", 'type': 'xpath', 'target': 'text', 'custom_func_name': ''},
}
# 详情页模板
detail_pattern = {
"sc": {'pattern': "//td[@id='TDContent']/div[1]", 'type': 'xpath', 'target': 'clean_html', 'custom_func_name': ''},
}
def init(item):
"""初始化时执行"""
logger.info(u'init item: %s', item)
item['_web_title'] = item['web_title']
del item['web_title']
item['region']=u'山东-泰安市'
item['_delay_between_pages'] = 3
def process_list_item(list_element, item):
"""处理列表页元素
:param list_element: _list模板解析出的html元素
:param item:
获取列表页后,根据_list模板获取每一个详情html代码后执行
有些内容可在列表页获取,可自定义在此处理,如:
item['pub_date'] = pq(list_element).find('span').text()
"""
item['issue_time'] = int(time.mktime(time.strptime(item['issue_time'][1:-1], "%Y-%m-%d")))
    if '075002001' in item['_current_start_url']:
        item['bid_type'] = 1
    elif '075002002' in item['_current_start_url']:
        item['bid_type'] = 0
    elif '075002003' in item['_current_start_url']:
        item['bid_type'] = 2
    # Stop paginating
# if item['_current_page'] == 10:
# item['_click_next'] = False
def process_detail_item(item):
"""处理详情页
:param item:
获取详情页信息,存入item后执行
可在此处理程序无法处理的情况
如详情页无法解析发布时间,需要使用正则表达式从content中提取等
"""
if len(item['sc']) > 0:
item['is_get'] = 1
else:
item['is_get'] = 0
| [
"[email protected]"
]
| |
51b0ecc3f68e0a7f94297a54e5a5c33b9f699b5b | 658e2e3cb8a4d5343a125f7deed19c9ebf06fa68 | /course_DE/udacity-data-engineering-projects-master/Project 5 - Data Pipelines with Airflow/exercises/dags/3_ex3_subdags/subdag.py | 2751def0ecb6a5a10629e528018801bbdaf2210a | []
| no_license | yennanliu/analysis | 3f0018809cdc2403f4fbfe4b245df1ad73fa08a5 | 643ad3fed41961cddd006fadceb0e927f1db1f23 | refs/heads/master | 2021-01-23T21:48:58.572269 | 2020-10-13T22:47:12 | 2020-10-13T22:47:12 | 57,648,676 | 11 | 9 | null | null | null | null | UTF-8 | Python | false | false | 1,649 | py | # Instructions
# In this exercise, we’ll place our S3 to RedShift Copy operations into a SubDag.
# 1 - Consolidate HasRowsOperator into the SubDag
# 2 - Reorder the tasks to take advantage of the SubDag Operators
import datetime
from airflow import DAG
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.udacity_plugin import HasRowsOperator
from airflow.operators.udacity_plugin import S3ToRedshiftOperator
import sql_statements
# Returns a DAG which creates a table if it does not exist, and then proceeds
# to load data into that table from S3. When the load is complete, a data
# quality check is performed to assert that at least one row of data is
# present.
def get_s3_to_redshift_dag(
parent_dag_name,
task_id,
redshift_conn_id,
aws_credentials_id,
table,
create_sql_stmt,
s3_bucket,
s3_key,
*args, **kwargs):
dag = DAG(
f"{parent_dag_name}.{task_id}",
**kwargs
)
create_task = PostgresOperator(
task_id=f"create_{table}_table",
dag=dag,
postgres_conn_id=redshift_conn_id,
sql=create_sql_stmt
)
copy_task = S3ToRedshiftOperator(
task_id=f"load_{table}_from_s3_to_redshift",
dag=dag,
table=table,
redshift_conn_id=redshift_conn_id,
aws_credentials_id=aws_credentials_id,
s3_bucket=s3_bucket,
s3_key=s3_key
)
#
# TODO: Move the HasRowsOperator task here from the DAG
#
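    # One way to complete the first TODO (sketch -- the parameter names
    # assume the course's udacity_plugin HasRowsOperator):
    check_task = HasRowsOperator(
        task_id=f"check_{table}_data",
        dag=dag,
        redshift_conn_id=redshift_conn_id,
        table=table
    )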
create_task >> copy_task
#
# TODO: Use DAG ordering to place the check task
#
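    # Sketch for the second TODO: run the quality check after the copy.
    copy_task >> check_task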
return dag
| [
"[email protected]"
]
| |
36d6859f91412f1d9bc50c8d9093e25601f1b157 | 854b94d7be92582bd191a7cb63143a95e5b5c337 | /hyfetch/distros/postmarketos_small.py | 4dc2bd42a651c2a3c7f18c7ef7c07c17cd241449 | [
"MIT"
]
| permissive | hykilpikonna/hyfetch | 673c0c999d0f3f542349824495ad6004f450ebac | 98863df16d70b030696f4b94080d114396320f35 | refs/heads/master | 2023-08-17T10:41:10.289997 | 2023-08-17T03:37:23 | 2023-08-17T03:37:23 | 479,913,941 | 447 | 78 | MIT | 2023-09-14T14:39:18 | 2022-04-10T04:38:15 | Shell | UTF-8 | Python | false | false | 325 | py | # This file is automatically generated. Please do not modify.
from . import AsciiArt
postmarketos_small = AsciiArt(match=r'''"postmarketos_small"''', color='2 7', ascii=r"""
${c1} /\
/ \
/ \
\__ \
/\__ \ _\
/ / \/ __
/ / ____/ \
/ \ \ \
/_____/ /________\
""")
| [
"[email protected]"
]
| |
4998d14e229e37f835bbecc90cd2f99ce4d68860 | 78efa54b2b253f99ea7e073f783e6121c20cdb52 | /Codechef/Maximize The Sum.py | 6c263f96896aaeb642979ffca927fdf582635a67 | []
| no_license | NishchaySharma/Competitve-Programming | 32a93581ab17f05d20129471f7450f34ec68cc53 | 1ec44324d64c116098eb0beb74baac7f1c3395bb | refs/heads/master | 2020-04-08T04:02:46.599398 | 2020-01-01T15:51:39 | 2020-01-01T15:51:39 | 159,000,529 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | for _ in range(int(input())):
n=int(input())
arr=sorted(list(map(int,input().split())))
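    # After sorting, pairing the i-th smallest with the i-th largest element
    # maximizes the sum of absolute pairwise differences.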
res=0
for i in range(n//2):
res+=abs(arr[i]-arr[n-i-1])
print(res)
| [
"[email protected]"
]
| |
3f59c6edd6e5a5576e24f61b7997b031a064e4d7 | a62c3f0f641c930d74aa4a43e14b0f1e8de71b5f | /pages/views.py | d3ee28ea642f9016e0fb679d2d6d97a165b998b5 | [
"MIT"
]
| permissive | ticotheps/scenic-realty-app | b2b02f509cff51d40d88c07fe5afff7c65c73c0c | c91caaee019d4790d444d02067a1a8e83ed554ba | refs/heads/develop | 2020-12-02T09:37:58.467839 | 2020-02-10T18:15:58 | 2020-02-10T18:15:58 | 230,966,666 | 0 | 0 | MIT | 2020-02-10T18:15:59 | 2019-12-30T19:10:19 | CSS | UTF-8 | Python | false | false | 204 | py | from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return render(request, 'pages/index.html')
def about(request):
return render(request, 'pages/about.html') | [
"[email protected]"
]
| |
85115d1212270dde95742797c7074e489bb195c8 | e9c0b70cab39fa771db383fa882436c14ae9aec7 | /pizza_app/migrations/0001_initial.py | ece9b436685209c0100e8865b75f0d5b8d49abde | [
"MIT"
]
| permissive | rusrom/django_pizza_project | f4b67b558a6238b58e285f1b9eb38bf1c8cbadf5 | 350862ca49b91f5d5d4e12105846ecc9e4fc15c0 | refs/heads/master | 2020-07-16T05:45:07.229049 | 2019-09-02T14:14:21 | 2019-09-02T14:14:21 | 205,732,229 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,024 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2019-09-02 12:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='PizzaShop',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('phone', models.CharField(max_length=100)),
('address', models.CharField(max_length=100)),
('logo', models.ImageField(upload_to='logo/')),
('owner', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='pizzashop', to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
]
| |
5858339fb5fa9dbe8b8188ff43641fdd371396b9 | 1ee10e1d42b59a95a64d860f0477a69b016d1781 | /Lecture_03/Lecture Code/10_Matcher_3_Lexical_Attibutes.py | 00f1d77a02bad808777d7d520f42ccb07444ce0b | []
| no_license | KushalIsmael/NLP | 5564070a573d251d7222dda85b8025ae1f9c3c6f | d4ce567a009e149b0cb1781d3a341d25aa438916 | refs/heads/master | 2023-08-18T14:07:48.646386 | 2021-10-28T19:09:25 | 2021-10-28T19:09:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | import spacy
from spacy.matcher import Matcher
nlp = spacy.load("en_core_web_sm")
matcher = Matcher(nlp.vocab)
pattern = [{"IS_DIGIT": True}, {"LOWER": "fifa"}, {"LOWER": "world"},
{"LOWER": "cup"}, {"IS_PUNCT": True}]
matcher.add("FIFA", [pattern])
doc = nlp("2018 FIFA World Cup: France won!")
matches = matcher(doc)
for match_id, start, end in matches:
matched_span = doc[start:end]
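    # the print below should emit: 2018 FIFA World Cup: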
print(matched_span.text) | [
"[email protected]"
]
| |
23de31fa7213263f9a98e2bd707d3c2d771dd3be | eda36d24a1e6d4f30597ab1e1b2d8e17694f93bd | /weio/tests/test_turbsim.py | 2afe6ac46c1e982c0352cf1e40abbc37dad84357 | [
"MIT"
]
| permissive | ebranlard/weio | 31fdab7a8afde9919f66fab942dad309f8d8d0e2 | 50fab087c5dc3e0248bcce578de6e713fa3e9b5f | refs/heads/main | 2023-07-23T19:32:42.548855 | 2022-12-19T08:13:06 | 2022-12-19T08:13:06 | 152,828,434 | 25 | 20 | MIT | 2023-01-13T20:37:29 | 2018-10-13T02:44:25 | Python | UTF-8 | Python | false | false | 1,734 | py | import unittest
import os
import numpy as np
from .helpers_for_test import MyDir, reading_test
try:
from weio.turbsim_file import TurbSimFile
except:
from weio.weio.turbsim_file import TurbSimFile
class Test(unittest.TestCase):
def test_001_read_all(self, DEBUG=True):
reading_test('TurbSim_*.*', TurbSimFile)
def test_TurbSim(self):
# --- Test without tower
F = TurbSimFile(os.path.join(MyDir,'TurbSim_NoTwr.bts'))
F.write( os.path.join(MyDir,'TurbSim_NoTwr_TMP.bts'))
F2= TurbSimFile(os.path.join(MyDir,'TurbSim_NoTwr_TMP.bts'))
os.remove( os.path.join(MyDir,'TurbSim_NoTwr_TMP.bts'))
np.testing.assert_almost_equal(F['u'][0,:,:,:],F2['u'][0,:,:,:],4)
np.testing.assert_almost_equal(F['u'][1,:,:,:],F2['u'][1,:,:,:],4)
np.testing.assert_almost_equal(F['u'][2,:,:,:],F2['u'][2,:,:,:],4)
# --- Test with tower
F = TurbSimFile(os.path.join(MyDir,'TurbSim_WithTwr.bts'))
np.testing.assert_almost_equal(F['u'][2,-1,1,3], 0.508036, 5)
np.testing.assert_almost_equal(F['u'][0, 4,2,0], 7.4867466, 5)
np.testing.assert_almost_equal(F['uTwr'][0, 4, :], [6.1509, 6.4063, 8.9555, 7.6943], 4)
F.write( os.path.join(MyDir,'TurbSim_WithTwr_TMP.bts'))
F2= TurbSimFile(os.path.join(MyDir,'TurbSim_WithTwr_TMP.bts'))
os.remove( os.path.join(MyDir,'TurbSim_WithTwr_TMP.bts'))
np.testing.assert_almost_equal(F['u'][0,:,:,:],F2['u'][0,:,:,:],3)
np.testing.assert_almost_equal(F['u'][1,:,:,:],F2['u'][1,:,:,:],3)
np.testing.assert_almost_equal(F['u'][2,:,:,:],F2['u'][2,:,:,:],3)
if __name__ == '__main__':
# Test().test_000_debug()
unittest.main()
| [
"[email protected]"
]
| |
b1c10929ca27cebfc8f32d5fa3e33f13d3744bd3 | c251401a04faee549a5255745dc976c2be8e24b9 | /work_orders/permissions.py | 15b4821acb2a82a375b098f4d93f2ef74b862691 | []
| no_license | fengo4142/aero-django-backend | a43a3526b570730fd9d519b8e890e550ff9f9f3c | 53167b52b68b30eef6a10edea47888ba0ad71a4e | refs/heads/master | 2022-11-11T10:01:50.534513 | 2020-06-24T15:40:11 | 2020-06-24T15:40:11 | 274,699,313 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,948 | py | import logging
from rest_framework.permissions import BasePermission
from work_orders.models import WorkOrderForm
logger = logging.getLogger('backend')
# *****************************************************************************
# ***************************** WORK ORDERS *******************************
# *****************************************************************************
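# Each permission class below gates one HTTP verb behind an AeroSimple
# permission code; the commented-out blocks are the older Django has_perm
# checks kept for reference.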
class CanCreateWorkOrders(BasePermission):
"""Allows to create a work order"""
def has_permission(self, request, view):
if request.user is None or not request.user.is_authenticated:
return False
# if (request.method == 'POST' and request.user.has_perm(
# "work_orders.add_workorder")):
# return True
if (request.method == 'POST' and request.user.aerosimple_user and \
request.user.aerosimple_user.has_permission("add_workorder")):
return True
return False
class CanViewWorkOrders(BasePermission):
"""Allows to view work orders list and detail """
def has_permission(self, request, view):
if request.user is None or not request.user.is_authenticated:
return False
# if (request.method == 'GET' and request.user.has_perm(
# "work_orders.view_workorder")):
# return True
if (request.method == 'GET' and request.user.aerosimple_user and \
request.user.aerosimple_user.has_permission("view_workorder")):
return True
return False
class CanFillMaintenanceForm(BasePermission):
"""Allows to create a Maintenance form"""
def has_permission(self, request, view):
if request.user is None or not request.user.is_authenticated:
return False
woform = WorkOrderForm.objects.get(
airport__id=request.user.aerosimple_user.airport_id)
role = woform.maintenance_form.assigned_role
users = woform.maintenance_form.assigned_users
has_role = role in request.user.aerosimple_user.roles.all()
is_assigned = request.user.aerosimple_user in users.all()
if (request.method == 'POST' and request.user.aerosimple_user
and request.user.aerosimple_user.has_permission("add_maintenance")
and request.user.aerosimple_user.has_permission("view_workorder")
and (has_role or is_assigned)):
return True
return False
class CanFillOperationsForm(BasePermission):
"""Allows to create a Operations form"""
def has_permission(self, request, view):
if request.user is None or not request.user.is_authenticated:
return False
woform = WorkOrderForm.objects.get(
airport__id=request.user.aerosimple_user.airport_id)
role = woform.operations_form.assigned_role
users = woform.operations_form.assigned_users
has_role = role in request.user.aerosimple_user.roles.all()
is_assigned = request.user.aerosimple_user in users.all()
if (request.method == 'POST' and request.user.aerosimple_user
and request.user.aerosimple_user.has_permission("add_operations")
and request.user.aerosimple_user.has_permission("view_workorder")
and (has_role or is_assigned)):
return True
return False
class CanEditWorkOrderSchema(BasePermission):
"""Allows to create work order schema instances"""
def has_permission(self, request, view):
if request.user is None or not request.user.is_authenticated:
return False
# if (request.method == 'POST' and request.user.has_perm(
# "work_orders.add_workorderschema")):
# return True
if (request.method == 'POST' and request.user.aerosimple_user and \
request.user.aerosimple_user.has_permission("add_workorderschema")):
return True
return False | [
"[email protected]"
]
| |
e41d486baf0f584817240d5dfb4283ad35235fff | a80884040ce1c178274a3068d216f440dd541844 | /tests/operators/test_group_by.py | 148a994d874624aae29cd6aea6bd533dc90abce8 | [
"MIT"
]
| permissive | maki-nage/rxsci | a4aae51edc1ef684b55df22e34c11aa1d54ef740 | 915e59ebf593c4b313265bb87cf0e1209ec2ee0f | refs/heads/master | 2023-01-19T14:32:11.638497 | 2023-01-17T08:06:35 | 2023-01-17T08:06:35 | 242,592,973 | 9 | 2 | MIT | 2022-11-08T21:54:16 | 2020-02-23T21:23:56 | Python | UTF-8 | Python | false | false | 4,013 | py | import rx
import rx.operators as ops
import rxsci as rs
from ..utils import on_probe_state_topology
def test_group_by_obs():
source = [1, 2, 2, 1]
actual_error = []
actual_completed = []
actual_result = []
mux_actual_result = []
def on_completed():
actual_completed.append(True)
store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
rx.from_(source).pipe(
rs.state.with_store(
store,
rx.pipe(
rs.ops.group_by(
lambda i: i,
rx.pipe(
ops.do_action(mux_actual_result.append),
),
))
),
).subscribe(
on_next=actual_result.append,
on_completed=on_completed,
on_error=actual_error.append,
)
assert actual_error == []
assert actual_completed == [True]
assert actual_result == source
assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
assert mux_actual_result[1:] == [
rs.OnCreateMux((0 ,(0,)), store),
rs.OnNextMux((0, (0,)), 1, store),
rs.OnCreateMux((1, (0,)), store),
rs.OnNextMux((1, (0,)), 2, store),
rs.OnNextMux((1, (0,)), 2, store),
rs.OnNextMux((0, (0,)), 1, store),
rs.OnCompletedMux((0, (0,)), store),
rs.OnCompletedMux((1, (0,)), store),
]
def test_group_by_list():
source = [1, 2, 2, 1]
actual_error = []
actual_completed = []
actual_result = []
mux_actual_result = []
def on_completed():
actual_completed.append(True)
store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
rx.from_(source).pipe(
rs.state.with_store(
store,
rx.pipe(
rs.ops.group_by(
lambda i: i,
[
ops.do_action(mux_actual_result.append),
],
))
),
).subscribe(
on_next=actual_result.append,
on_completed=on_completed,
on_error=actual_error.append,
)
assert actual_error == []
assert actual_completed == [True]
assert actual_result == source
assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
assert mux_actual_result[1:] == [
rs.OnCreateMux((0 ,(0,)), store),
rs.OnNextMux((0, (0,)), 1, store),
rs.OnCreateMux((1, (0,)), store),
rs.OnNextMux((1, (0,)), 2, store),
rs.OnNextMux((1, (0,)), 2, store),
rs.OnNextMux((0, (0,)), 1, store),
rs.OnCompletedMux((0, (0,)), store),
rs.OnCompletedMux((1, (0,)), store),
]
def test_group_by_without_store():
actual_error = []
rx.from_([1, 2, 3, 4]).pipe(
rs.ops.group_by(
lambda i: i % 2 == 0,
pipeline=rx.pipe(
)
)
).subscribe(on_error=actual_error.append)
assert type(actual_error[0]) is ValueError
def test_forward_topology_probe():
actual_topology_probe = []
source = [1, 2, 3, 4]
rx.from_(source).pipe(
rs.state.with_memory_store(
rx.pipe(
rs.ops.group_by(
lambda i: i % 2 == 0,
pipeline=rx.pipe()
),
on_probe_state_topology(actual_topology_probe.append),
)
),
).subscribe()
assert len(actual_topology_probe) == 1
def test_empty_source():
source = []
actual_result = []
on_completed = []
actual_error = []
rx.from_(source).pipe(
rs.state.with_memory_store(
rx.pipe(
rs.ops.group_by(
lambda i: i % 2 == 0,
pipeline=[]
),
)
),
).subscribe(
on_next=actual_result.append,
on_completed=lambda: on_completed.append(True),
on_error=actual_error.append,
)
assert actual_result == []
| [
"[email protected]"
]
| |
ef4f31488ff1d5936c39d77fc37b29c55734102e | 4500003dcaa3eb92e2b9c6bca8987ec473fb5ec3 | /core/migrations/0006_post_slug.py | db41286dfce7136c7c34e38796bac248d7291c36 | []
| no_license | alikhundmiri/simpleweddingdjango | 0bb2bfc069bac075d759efa96eede55c68595cf4 | 57aa6576df368fde651f7f2b6863f693bbb57756 | refs/heads/master | 2022-12-17T22:36:18.674974 | 2020-06-14T08:10:09 | 2020-06-14T08:10:09 | 239,115,495 | 0 | 0 | null | 2022-12-08T03:51:09 | 2020-02-08T11:01:00 | HTML | UTF-8 | Python | false | false | 866 | py | # Generated by Django 3.0.3 on 2020-03-29 16:37
from django.db import migrations, models
from core.utils import random_string_generator
from django.utils.text import Truncator
from django.utils.text import slugify
def gen_slug(apps, schema_editor):
MyModel = apps.get_model('core', 'Post')
for row in MyModel.objects.all():
if not row.slug:
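            # NB: chars(200) plus the 5-char random suffix can exceed the
            # SlugField's max_length=200; a shorter truncation would be safer.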
row.slug = slugify((Truncator(row.title).chars(200) +'-'+ random_string_generator(size=4)))
row.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0005_auto_20200329_2203'),
]
operations = [
migrations.AddField(
model_name='post',
name='slug',
field=models.SlugField(max_length=200, null=True),
),
migrations.RunPython(gen_slug, reverse_code=migrations.RunPython.noop),
]
| [
"[email protected]"
]
| |
b5450b3f9c338676f9ab05092e450396a19672b0 | 5d5f6ba3bdcb52b4750a5f28afa8a1a1019bfc9e | /django/extras/djangoForms/djangoFormApp/models.py | 464d6d00fd8196fb2c75dbf55badc599443656b0 | []
| no_license | eDiazGtz/pythonLearning | 06e96f2f5a6e48ac314cb815cf9fbf65d0b7c2c8 | 57d7b2292cf5d9769cce9adf765962c3c0930d6c | refs/heads/master | 2023-06-18T02:16:09.293375 | 2021-05-03T18:09:52 | 2021-05-03T18:09:52 | 335,090,531 | 0 | 0 | null | 2021-05-03T18:09:53 | 2021-02-01T21:35:24 | Python | UTF-8 | Python | false | false | 758 | py | from django.db import models
# Create your models here.
class UserManager(models.Manager):
def createValidator(self, postData):
errors = {}
if len(postData['firstName']) < 1:
errors["firstName"] = "First Name should be at least 1 character"
if len(postData['lastName']) < 1:
errors["lastName"] = "Last Name should be at least 1 character"
if len(postData['email']) > 50:
errors["email"] = "Email max length 50 Characters"
return errors
class User(models.Model):
firstName = models.CharField(max_length=17)
lastName = models.CharField(max_length=20)
email = models.CharField(max_length=50)
password = models.CharField(max_length=100)
objects = UserManager() | [
"[email protected]"
]
| |
1d1e5c80adae2a85e36764be6c6786ca13998bc7 | 3a771b72dae1aae406b94726bcbcf73915577b18 | /q38.py | 0a85a5450c76b409276bf18b448122f28c6bc171 | []
| no_license | SHANK885/Python-Basic-Programs | 4fcb29280412baa63ffd33efba56d9f59770c9dc | 157f0f871b31c4523b6873ce5dfe0d6e26a6dc61 | refs/heads/master | 2021-07-18T18:24:10.455282 | 2018-11-19T07:02:27 | 2018-11-19T07:02:27 | 138,009,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py | '''
Define a function which can generate a list where the values are square of numbers between 1 and 20 (both included).
Then the function needs to print the last 5 elements in the list.
'''
def lis(lower,upper):
l = []
for i in range(lower,upper+1):
        l.append(i**2)
print(l[-5:])
lis(1,20) | [
"[email protected]"
]
| |
3106b4bc8e71a298aca6998c29c4550feecf1a1e | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_057/ch136_2020_04_01_12_32_08_786356.py | 4df310a8d6f0a4bc15990114f812d772610cae60 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54 | py | dinheiro = 10
print('dinheiro: {0}'.format(dinheiro))
| [
"[email protected]"
]
| |
b42d376714e61221c9b1932afe6a308354078de5 | 523fb785bda41e33546c929a5c2de6c93f98b434 | /专题学习/链表/mergeKLists.py | 89db71c8897b4a8abf67d8c47ea987374e83a389 | []
| no_license | lizhe960118/TowardOffer | afd2029f8f9a1e782fe56ca0ff1fa8fb37892d0e | a0608d34c6ed96c9071cc3b9bdf70c95cef8fcbd | refs/heads/master | 2020-04-27T10:33:21.452707 | 2019-05-02T10:47:01 | 2019-05-02T10:47:01 | 174,259,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def mergeKLists(self, lists):
"""
:type lists: List[ListNode]
:rtype: ListNode
"""
        # Collect every node value into a list, sort it, then rebuild and return a single linked list
res_list = []
for l in lists:
while(l):
res_list.append(l.val)
l = l.next
res_list.sort()
dummy = ListNode(-1)
head = dummy
for num in res_list:
head.next = ListNode(num)
head = head.next
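        # Note: collecting all values first costs O(N log N) time and O(N)
        # extra space; a k-way heap merge would be O(N log k) instead.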
return dummy.next | [
"[email protected]"
]
| |
89e4b0cc8cc580454793178a3e90e399b693f848 | 1cd853babf022779f3392eb9e1781f952d4f2c07 | /proposal.py | 45a17c9f760c1ab2575741bea87304eb7b516340 | [
"Apache-2.0"
]
| permissive | ksrhamdi/proCon3 | 84b53027305f609267393701b49f3e7efade9097 | f0d214651dae5cbdbd4f7ff881269fb1cc5501ad | refs/heads/master | 2022-11-10T06:58:07.931219 | 2020-06-03T18:01:10 | 2020-06-03T18:01:10 | 276,995,886 | 0 | 0 | Apache-2.0 | 2020-07-03T22:15:11 | 2020-07-03T22:15:10 | null | UTF-8 | Python | false | false | 5,885 | py | # Import external modules.
from google.appengine.api import memcache
from google.appengine.ext import ndb
import logging
import random
import time
# Import local modules.
from configuration import const as conf
from constants import Constants
const = Constants()
const.MAX_RETRY = 3
const.MIN_REAGGREGATE_DELAY_SEC = 60
# Parent key: RequestForProposals? No, use KeyProperty instead.
class Proposal(ndb.Model):
requestId = ndb.StringProperty() # May be null
title = ndb.StringProperty()
detail = ndb.StringProperty()
creator = ndb.StringProperty()
allowEdit = ndb.BooleanProperty()
voteAggregateStartTime = ndb.IntegerProperty()
numPros = ndb.IntegerProperty( default=0 )
numCons = ndb.IntegerProperty( default=0 )
netPros = ndb.IntegerProperty( default=0 ) # numPros - numCons
lastSumUpdateTime = ndb.IntegerProperty( default=0 )
@ndb.transactional( retries=const.MAX_RETRY )
def setEditable( proposalId, editable ):
proposalRecord = Proposal.get_by_id( int(proposalId) )
proposalRecord.allowEdit = editable
proposalRecord.put()
#####################################################################################
# Use tasklets for async counting pros/cons per proposal.
# If enough delay since voteAggregateStartTime... updates voteAggregateStartTime and returns flag.
@ndb.transactional( retries=const.MAX_RETRY )
def __setVoteAggStartTime( proposalId ):
proposalRecord = Proposal.get_by_id( int(proposalId) )
now = int( time.time() )
if proposalRecord.voteAggregateStartTime + const.MIN_REAGGREGATE_DELAY_SEC > now:
return False
proposalRecord.voteAggregateStartTime = now
proposalRecord.put()
return True
# Retrieves all reason vote counts for a proposal, sums their pro/con counts, and updates proposal pro/con counts.
@ndb.tasklet
def __updateVoteAggs( proposalId ):
reasons = yield Reason.query( Reason.proposalId==proposalId ).fetch_async() # Async
numPros = sum( reason.voteCount for reason in reasons if reason.proOrCon == conf.PRO )
numCons = sum( reason.voteCount for reason in reasons if reason.proOrCon == conf.CON )
__setNumProsAndCons( proposalId, numPros, numCons ) # Transaction
#####################################################################################
# Use sharded counter to count pros/cons per proposal.
const.NUM_SHARDS = 10
const.SHARD_KEY_TEMPLATE = '{}-{}'
const.COUNTER_CACHE_SEC = 10
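
# Sharding spreads concurrent vote increments across NUM_SHARDS entities, so
# transactional writes contend on many small records instead of one entity.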
class ProposalShard( ndb.Model ):
requestId = ndb.StringProperty()
proposalId = ndb.StringProperty()
numPros = ndb.IntegerProperty( default=0 )
numCons = ndb.IntegerProperty( default=0 )
@ndb.tasklet
def incrementTasklet( requestId, proposalId, prosInc, consInc ):
logging.debug( 'proposal.incrementAsync() proposalId={}'.format(proposalId) )
yield __incrementShard( requestId, proposalId, prosInc, consInc ) # Pause and wait for async transaction
# Cache sums in Proposal record, to make top proposals queryable by score.
# Rate-limit updates to Proposal, by storing last-update time
now = int( time.time() )
updateNow = yield __checkAndSetLastSumTime( proposalId, now ) # Pause and wait for async transaction
logging.debug( 'proposal.incrementAsync() updateNow=' + str(updateNow) )
if updateNow:
shardRecords = yield __getProposalShardsAsync( proposalId ) # Pause and wait for async
numPros = sum( s.numPros for s in shardRecords if s )
numCons = sum( s.numCons for s in shardRecords if s )
logging.debug( 'proposal.incrementAsync() numPros=' + str(numPros) + ' numCons=' + str(numCons) )
yield __setNumProsAndConsAsync( proposalId, numPros, numCons ) # Pause and wait for async transaction
logging.debug( 'proposal.incrementAsync() __setNumProsAndCons() done' )
@ndb.transactional_async( retries=const.MAX_RETRY )
def __incrementShard( requestId, proposalId, prosInc, consInc ):
shardNum = random.randint( 0, const.NUM_SHARDS - 1 )
shardKeyString = const.SHARD_KEY_TEMPLATE.format( proposalId, shardNum )
shardRec = ProposalShard.get_by_id( shardKeyString )
if shardRec is None:
shardRec = ProposalShard( id=shardKeyString, requestId=requestId, proposalId=proposalId )
shardRec.numPros += prosInc
shardRec.numCons += consInc
shardRec.put()
@ndb.transactional_async( retries=const.MAX_RETRY )
def __checkAndSetLastSumTime( proposalId, now ):
logging.debug( 'proposal.__checkAndSetLastSumTime() proposalId={}'.format(proposalId) )
proposalRecord = Proposal.get_by_id( int(proposalId) )
logging.debug( 'proposal.__checkAndSetLastSumTime() proposalRecord={}'.format(proposalRecord) )
if proposalRecord.lastSumUpdateTime + const.COUNTER_CACHE_SEC < now:
proposalRecord.lastSumUpdateTime = now
proposalRecord.put()
return True
else:
return False
def __getProposalShardsAsync( proposalId ):
shardKeyStrings = [ const.SHARD_KEY_TEMPLATE.format(proposalId, s) for s in range(const.NUM_SHARDS) ]
logging.debug( 'proposal.__getProposalShardsAsync() shardKeyStrings=' + str(shardKeyStrings) )
shardKeys = [ ndb.Key(ProposalShard, s) for s in shardKeyStrings ]
return ndb.get_multi_async( shardKeys )
@ndb.transactional_async( retries=const.MAX_RETRY )
def __setNumProsAndConsAsync( proposalId, numPros, numCons ):
__setNumProsAndConsImp( proposalId, numPros, numCons )
@ndb.transactional( retries=const.MAX_RETRY )
def __setNumProsAndCons( proposalId, numPros, numCons ):
__setNumProsAndConsImp( proposalId, numPros, numCons )
def __setNumProsAndConsImp( proposalId, numPros, numCons ):
proposalRecord = Proposal.get_by_id( int(proposalId) )
proposalRecord.numPros = numPros
proposalRecord.numCons = numCons
proposalRecord.netPros = numPros - numCons
proposalRecord.put()
| [
"[email protected]"
]
| |
ad8bc92067a56e68d2d6a41e02f85a5fc6f954e0 | 1d9a6406c859fda186f520bb4472c551fc572c7b | /src/hopla/cli/groupcmds/hatch.py | e3c85019653f241bbc5b6a5ab861095a0e1e838d | [
"Apache-2.0"
]
| permissive | rickie/hopla | af21b794ce6719d402721550e1ee4091790410b6 | 24a422194e42c03d5877dc167b2b07147326a595 | refs/heads/main | 2023-08-13T17:33:03.612293 | 2021-10-12T12:13:25 | 2021-10-12T12:13:25 | 408,538,704 | 0 | 0 | Apache-2.0 | 2021-09-20T17:30:15 | 2021-09-20T17:30:15 | null | UTF-8 | Python | false | false | 873 | py | #!/usr/bin/env python3
"""
The module with CLI code that handles the `hopla hatch` GROUP command.
"""
import sys
from typing import NoReturn
import click
import requests
from hopla.hoplalib.hatchery.hatchcontroller import HatchRequester
@click.group()
def hatch():
"""GROUP for hatching eggs."""
def hatch_egg(*, egg_name: str, potion_name: str) -> NoReturn:
"""
Hatch an egg by performing an API request and echo the result to the
terminal.
"""
requester = HatchRequester(
egg_name=egg_name,
hatch_potion_name=potion_name
)
response: requests.Response = requester.post_hatch_egg_request()
json: dict = response.json()
if json["success"] is True:
click.echo(f"Successfully hatched a {egg_name}-{potion_name}.")
sys.exit(0)
click.echo(f"{json['error']}: {json['message']}")
sys.exit(1)
| [
"[email protected]"
]
| |
6af9434c46be76fce9d56f3ea60f2fca581ad793 | bc0dd74217258c8bdd30e6095dfd7a3edca2dd09 | /assignments/CarND-Vehicle-Detection-P5/f2f.py | c7ddcf21d773d672880881636ee6f76213c48ccd | []
| no_license | akamlani/selfdrivingcar | d645872f4129fcd4c68c3d4967fdd9c784086cc8 | eadd43b4c6d60c71e283b7c43cba61030377eb47 | refs/heads/master | 2020-06-12T10:19:55.748107 | 2017-05-02T18:44:52 | 2017-05-02T18:44:52 | 75,585,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,461 | py | import numpy as np
import cv2
from scipy.ndimage.measurements import label
import viz_utils as viz
class Vehicle(object):
def __init__(self):
self.detected = False # was vehicle detected in last iteration
self.n_detections = 0 # number of times this vehicle has been seen
self.n_nondetections = 0 # number of consecutive times this hard has not been detected
self.xpixels = None # pixel x values of last detection
self.ypixels = None # pixel y values of last detection
self.recent_xfitted = [] # x position of last n fits of the bounding box
self.recent_yfitted = [] # y position of last n fits of bounding box
self.recent_wfitted = [] # width position of last n fits of bounding box
self.recent_hfitted = [] # height position of last n fits of bounding box
self.bestx = None # average x position of last n fits
self.besty = None # average y position of last n fits
self.bestw = None # average width of last n fits
self.besth = None # average height of last n fits
class F2FTracker(object):
def __init__(self, dimensions, window_size=10):
"""
window_size: 1 for single image, else window over multiple frames
"""
self.nframes = 0 # frame_cnt
self.window_size = window_size # nframes
self.threshold = 0 if window_size == 1 else 1
rows, cols = dimensions
self.heatmap = np.zeros((rows, cols, window_size), dtype=np.float32)
def process_frame(self, base_img, heatmap_coords):
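        """Push this frame's heatmap into the ring buffer, average over the
        window, threshold the result, and draw labeled boxes on base_img."""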
# get current heatmap
window_idx = self.nframes % self.window_size
heat_curr = viz.add_heat(base_img, heatmap_coords)
self.heatmap[:, :, window_idx] = heat_curr
# create a smooth heatmap over a window of frames
curr_slice = self.heatmap[:, :, :self.nframes + 1]
item = curr_slice if self.nframes < self.window_size else self.heatmap
heat_smooth = np.mean(item, axis=2)
# improve heatmap instances
        heat_thresh = viz.apply_threshold(heat_smooth, threshold=self.threshold)
# annotate image via heatmap
labels = label(heat_thresh)
draw_img = viz.draw_labeled_bboxes(base_img, labels)
self.nframes += 1
return draw_img, heat_thresh, labels
| [
"[email protected]"
]
| |
555ab84accb35fdd7a4be6c3279a0dfd0fda301b | 71acb7214efd91c0d327f6d8958e1798eadb4401 | /locations/spiders/lidl_be.py | 82d333e830b64a9538b85a87b7b5987b418fa8c1 | [
"CC0-1.0",
"MIT"
]
| permissive | alltheplaces/alltheplaces | 21b9f8b4ace1352e52ae7b8f8825a930d2cb033e | 1bcbb55cfcf06f2c714465570711f6e83f205c22 | refs/heads/master | 2023-08-30T19:45:35.098658 | 2023-08-30T17:51:54 | 2023-08-30T17:51:54 | 61,166,935 | 453 | 176 | NOASSERTION | 2023-09-14T17:16:40 | 2016-06-15T01:09:18 | Python | UTF-8 | Python | false | false | 1,410 | py | import re
from locations.hours import DAYS_FR, OpeningHours, day_range, sanitise_day
from locations.spiders.lidl_gb import LidlGBSpider
from locations.storefinders.virtualearth import VirtualEarthSpider
class LidlBESpider(VirtualEarthSpider):
name = "lidl_be"
item_attributes = LidlGBSpider.item_attributes
dataset_id = "2be5f76f36e8484e965e84b7ee0cd1b1"
dataset_name = "Filialdaten-BE/Filialdaten-BE"
key = "AvGfUYinH_I7qdNZWDlXTHHysoytHWqkqZpxHBN9Z0Z0YLQup0u6qZoB8uQXUW_p"
def parse_item(self, item, feature, **kwargs):
item["name"] = feature["ShownStoreName"]
oh = OpeningHours()
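        # OpeningTimes appears to hold entries such as "Lun - Ven 08:00-20:00"
        # (French day names, per DAYS_FR); each regex match is either a
        # "Day - Day" range or a single day, plus an HH:MM-HH:MM window.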
for day, start_time, end_time in re.findall(
r"(\w+ - \w+|\w+) (\d{2}:\d{2})-(\d{2}:\d{2})",
feature["OpeningTimes"],
):
if "-" in day:
start_day, end_day = day.split("-")
start_day = sanitise_day(start_day, DAYS_FR)
end_day = sanitise_day(end_day, DAYS_FR)
else:
start_day = sanitise_day(day, DAYS_FR)
end_day = None
if start_day and end_day:
for d in day_range(start_day, end_day):
oh.add_range(d, start_time, end_time)
elif start_day:
oh.add_range(start_day, start_time, end_time)
item["opening_hours"] = oh.as_opening_hours()
yield item
| [
"[email protected]"
]
| |
149a32f41cf34c3a51f8d317601177f0d4f27b59 | 067573d864754a7ce73014086cd6c9165e2b5ea0 | /scripts/pMSSMtree.cfg.py | a99460e708046205f3dac742f4ace7e7d0d8f716 | []
| no_license | UhhCmsAnalysis/Run2pMSSM | 3f586d8dcbaacd4de2ed908062fe9875b43fef4c | bb6c7c7309108b26ff1d8f2062f712d9b848555a | refs/heads/master | 2020-12-21T08:53:50.884254 | 2020-02-09T20:33:58 | 2020-02-09T20:33:58 | 236,379,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,020 | py | FILE = open("scripts/analyses.cfg.py")
exec(FILE)
FILE.close()
#################################
# parameters, masses, etc
#################################
treeCfg = [
[ "params",
{"files":"idata/parameters/params_batch*.txt","base":""}],
[ "moreparams",
{"files":"idata/moreparams/moreparams.txt","base":""}],
[ "fs",
{"files":"idata/fs/fs.txt","base":"fs"}],
[ "lilith",
{"files":"idata/moreparams/lilith.txt"}],
# [ "xsect13",
# {"files":"idata/xsect/xsect_13*txt","base":"","skip_ID":[],"skip_col":"pointName"}],
[ "xsect8",
{"files":"idata/xsect/xsect_8*txt","base":"","skip_ID":[2321,8344,6640],"skip_col":"pointName"}],
[ "xsect7",
{"files":"idata/xsect/xsect_7*txt","base":"","skip_ID":[2321,8344,6640]}],
]
datadir = "idata"
#################################
# likelihoods
#################################
def addLlhd2Cfg(anaList,ext=""):
for ana in anaList:
for sr in ana[1]:
base = ana[0]
base += sr
base += ext.replace(".","")
key = base + "_llhd"
files = datadir + "/" + ana[0] + "/llhd" + sr + ext + ".txt"
treeCfg.append([key,{"files":files,"base":base}])
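# Illustrative expansion (hypothetical analysis name): an entry ("myAna", ["SR1"]) with ext=""
# appends ["myAnaSR1_llhd", {"files": "idata/myAna/llhdSR1.txt", "base": "myAnaSR1"}] to treeCfg.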
addLlhd2Cfg(ana7)
addLlhd2Cfg(ana8)
addLlhd2Cfg(ana13)
addLlhd2Cfg(ana7n8n13)
#################################
# Z-values
#################################
def addZ2Cfg(anaList,ext=""):
for ana in anaList:
for sr in ana[1]:
base = ana[0]
base += sr
base += ext.replace(".","_")
key = base + "_Z"
files = datadir + "/" + ana[0] + "/Z" + sr + ext + ".txt"
treeCfg.append([key,{"files":files,"base":base}])
#addZ2Cfg(ana7)
#addZ2Cfg(ana8)
#addZ2Cfg(ana7n8)
addZ2Cfg(ana7z)
addZ2Cfg(ana8z)
addZ2Cfg(ana13z)
addZ2Cfg(ana7n8n13z)
addZ2Cfg(ana7n8n13lossyz)
################################
# print
################################
#for entry in treeCfg:
# print entry[0],entry[1]
| [
"[email protected]"
]
| |
6bacb134a528804dff45b812c5ea7e73e151f3ac | 0add7953d3e3ce2df9e8265102be39b758579753 | /built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/model_zoo/model_zoo.py | 5deb87cd07e7947c8ec193b4da018690b923ef91 | [
"Apache-2.0",
"MIT"
]
| permissive | Huawei-Ascend/modelzoo | ae161c0b4e581f8b62c77251e9204d958c4cf6c4 | df51ed9c1d6dbde1deef63f2a037a369f8554406 | refs/heads/master | 2023-04-08T08:17:40.058206 | 2020-12-07T08:04:57 | 2020-12-07T08:04:57 | 319,219,518 | 1 | 1 | Apache-2.0 | 2023-03-24T22:22:00 | 2020-12-07T06:01:32 | Python | UTF-8 | Python | false | false | 3,377 | py | # -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Model zoo."""
import torch
import logging
import os
from vega.model_zoo.torch_vision_model import get_torchvision_model_file
from vega.search_space.networks import NetworkDesc, NetTypes
from vega.core.common import TaskOps
from vega.core.common.general import General
class ModelZoo(object):
"""Model zoo."""
@classmethod
def set_location(cls, location):
"""Set model zoo location.
:param location: model zoo location.
:type localtion: str.
"""
General.model_zoo.model_zoo_path = location
@classmethod
def get_model(cls, model_desc=None, model_checkpoint=None):
"""Get model from model zoo.
:param network_name: the name of network, eg. ResNetVariant.
:type network_name: str or None.
:param network_desc: the description of network.
:type network_desc: str or None.
:param model_checkpoint: path of model.
:type model_checkpoint: str.
:return: model.
:rtype: model.
"""
try:
network = NetworkDesc(model_desc)
model = network.to_model()
except Exception as e:
logging.error("Failed to get model, model_desc={}, msg={}".format(
model_desc, str(e)))
raise e
logging.info("Model was created.")
logging.debug("model_desc={}".format(model_desc))
if model_checkpoint is not None:
logging.info("Load model with weight.")
model = cls._load_pretrained_model(network, model, model_checkpoint)
logging.info("Model was loaded.")
return model
@classmethod
def _load_pretrained_model(cls, network, model, model_checkpoint):
if not model_checkpoint and network._model_type == NetTypes.TORCH_VISION_MODEL:
model_file_name = get_torchvision_model_file(network._model_name)
full_path = "{}/torchvision_models/checkpoints/{}".format(
TaskOps().model_zoo_path, model_file_name)
else:
full_path = model_checkpoint
logging.info("load model weights from file.")
logging.debug("Weights file: {}".format(full_path))
if not os.path.isfile(full_path):
raise "Pretrained model is not existed, model={}".format(full_path)
checkpoint = torch.load(full_path)
model.load_state_dict(checkpoint)
return model
@classmethod
def infer(cls, model, dataloader):
"""Infer the result."""
model.eval()
infer_result = []
with torch.no_grad():
model.cuda()
for _, input in enumerate(dataloader):
if isinstance(input, list):
input = input[0]
logits = model(input.cuda())
if isinstance(logits, tuple):
logits = logits[0]
infer_result.extend(logits)
return infer_result
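# Usage sketch (the description dict, checkpoint path and dataloader are placeholders):
# model = ModelZoo.get_model(model_desc=my_desc, model_checkpoint='/path/to/weights.pth')
# predictions = ModelZoo.infer(model, my_dataloader)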
| [
"[email protected]"
]
| |
cb1c16ee59fe20890a221136d81fcc1734dc8a2d | 940bdfb1d2014e0fdf8c1d138efb43935446864a | /ayush_crowdbotics_347/settings.py | 24c34f26d61c779f77db6396b510cd90b427c8e0 | []
| no_license | payush/ayush-crowdbotics-347 | f8568a28c0fd328161e9961d1f4ffc73ed1ff3de | 08b235df039628147296a723f18dc976317479db | refs/heads/master | 2020-03-23T19:49:01.171461 | 2018-07-23T11:14:46 | 2018-07-23T11:14:46 | 142,003,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,130 | py | """
Django settings for ayush_crowdbotics_347 project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'br8n%%zz9_*mw+%so6e=q21!m$82iugifwit)lyt@s^w207*4w'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ayush_crowdbotics_347.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ayush_crowdbotics_347.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
import environ
env = environ.Env()
ALLOWED_HOSTS = ['*']
SITE_ID = 1
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
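# Overrides the sqlite3 DATABASES default defined above; env.db() reads the
# DATABASE_URL environment variable (django-environ convention).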
DATABASES = {
'default': env.db()
}
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
LOCAL_APPS = [
'home',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
# allauth
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = None
LOGIN_REDIRECT_URL = '/'
| [
"[email protected]"
]
| |
0343a12712af23f99051af1d1eb45efc8aa04b53 | 5dfa9dfb2d2d604f54de7020aed11642f03f1186 | /SLAC/dark_defects_offline/v0/validator_dark_defects_offline.py | 30eb49152670cb1873fc87d6cdb693baf4218fea | [
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | lsst-camera-dh/harnessed-jobs | 49a9a65f3368771ff7b7b22caa94fc8f384681f4 | 352f48b70633b0f0e3faf941198edf1de85f4989 | refs/heads/master | 2021-03-19T16:57:36.199351 | 2019-03-10T21:18:46 | 2019-03-10T21:18:46 | 34,645,042 | 0 | 1 | null | 2018-04-03T23:37:34 | 2015-04-27T03:59:33 | Python | UTF-8 | Python | false | false | 1,050 | py | #!/usr/bin/env python
import lsst.eotest.sensor as sensorTest
import lcatr.schema
import siteUtils
import eotestUtils
sensor_id = siteUtils.getUnitId()
mask_file = '%s_dark_pixel_mask.fits' % sensor_id
eotestUtils.addHeaderData(mask_file, LSST_NUM=sensor_id, TESTTYPE='SFLAT_500',
DATE=eotestUtils.utc_now_isoformat(),
CCD_MANU=siteUtils.getCcdVendor().upper())
results = [lcatr.schema.fileref.make(mask_file)]
eotest_results = '%s_eotest_results.fits' % sensor_id
data = sensorTest.EOTestResults(eotest_results)
amps = data['AMP']
npixels = data['NUM_DARK_PIXELS']
ncolumns = data['NUM_DARK_COLUMNS']
for amp, npix, ncol in zip(amps, npixels, ncolumns):
results.append(lcatr.schema.valid(lcatr.schema.get('dark_defects'),
amp=amp,
dark_pixels=npix,
dark_columns=ncol))
results.append(siteUtils.packageVersions())
lcatr.schema.write_file(results)
lcatr.schema.validate_file()
| [
"[email protected]"
]
| |
46747fbc3c33b336048baf27aad12d4a044b8473 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /yfooETHj3sHoHTJsv_11.py | 3b80e0b8989222b1ece889e3f7396b901396c028 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | """
Create a function that returns `True` when `num1` is equal to `num2`;
otherwise return `False`.
### Examples
is_same_num(4, 8) ➞ False
is_same_num(2, 2) ➞ True
is_same_num(2, "2") ➞ False
### Notes
Don't forget to `return` the result.
"""
def is_same_num(num1, num2):
    return num1 == num2
| [
"[email protected]"
]
| |
394530c218df75cb6d4205db5a36ae592e904fac | 768caee28ad4eddc7dd0b70b8f293854b25cf25b | /source/examples/mini-tutoriales/mipaquetepython/mipaquetepython/app.py | a3779dc5aadba97063a4ef9233bb278dbccbfc24 | []
| no_license | macagua/collective.spanishdocumentation | bb904cfaaf2e8151d4af18362f379f03a4351715 | a5638b87329dfe54746179bbf1848dad43d62012 | refs/heads/master | 2021-06-06T04:24:26.467552 | 2012-11-28T14:59:26 | 2012-11-28T14:59:26 | 2,941,330 | 2 | 2 | null | 2018-11-07T17:34:54 | 2011-12-08T16:27:56 | Python | UTF-8 | Python | false | false | 76 | py | var = raw_input("Introduzca alguna frase: ")
print "Usted introdujo: ", var
| [
"[email protected]"
]
| |
be8933396f92ba4e0bbc0f721914a0ef71410726 | 20cf2cb73adfed63cf182fc12a09aa3aadc033c6 | /filter.py | ba1f301e7c35530bd36538e7e6db9a0ebf49052c | []
| no_license | arunkumar27-ank-tech/Python-Programs | 678ae558e8c141a6302e2705849c97258974c4eb | a56788057d1bf8848681e38eb569874d84db7337 | refs/heads/master | 2023-06-16T14:50:36.146381 | 2021-07-15T13:57:54 | 2021-07-15T13:57:54 | 386,305,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py | from functools import reduce
lst = [1, 2, 3, 4, 5, 6, 7, 8, 9]
evens = list(filter(lambda n: n % 2 == 0, lst))
doubles = list(map(lambda n: n+2, evens))
sum1 = reduce(lambda a, b: a+b, doubles)
print(evens)
print(doubles)
print(sum1)
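# Expected output:
# [2, 4, 6, 8]
# [4, 6, 8, 10]
# 28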
| [
"[email protected]"
]
| |
bf77466fc9d42438623ab91fe345fb7f007eef5d | cca70e45645d5b96f98b1328833d5b4ebb1c882e | /P20/P06.py | cf4525656b07a2c7601c33937201708a72cf69c6 | []
| no_license | webturing/Python3Programming_19DS12 | 9613a9808407b6abef0bc89ad8f74fc3920e789f | 5bbc1e10cec0ebf7d5dfb415a9d4bb07ce0b32ca | refs/heads/master | 2020-08-01T10:23:09.474316 | 2019-12-27T11:52:34 | 2019-12-27T11:52:34 | 210,964,665 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 951 | py | '''
赵、钱、孙、李、周五人围着一张圆桌吃饭。饭后,周回忆说:“吃饭时,赵坐在钱旁边,钱的左边是孙或李”;李回忆说:“钱坐在孙左边,我挨着孙坐”。
结果他们一句也没有说对。请问,他们在怎样坐的?
'''
def left(a, b):
return a + 1 == b or a == 5 and b == 1
def right(a, b):
return left(b, a)
def adj(a, b):
return right(a, b) or left(a, b)
zhao, qian, sun, li, zhou = 1, 1, 1, 1, 1
for qian in range(2, 6):
for sun in range(2, 6):
if sun == qian: continue
for li in range(2, 6):
if li == qian or li == sun:
continue
zhou = 15 - zhao - qian - sun - li
if adj(zhao, qian) or left(qian, sun) or left(qian, li):
continue
if left(sun, qian) or adj(sun, li):
continue
print("%d %d %d %d %d" % (zhao, qian, sun, li, zhou))
| [
"[email protected]"
]
| |
c433d7fe29d312b80fbac7fc3888a4c7c7dd2223 | 39c861da8f362874baac3f7e4aab089b18125dab | /ghostwriter/modules/exceptions.py | be1d30a317dbe958adf73cba0a39823fd06cbd43 | [
"BSD-3-Clause"
]
| permissive | chrismaddalena/Ghostwriter | 47cdc2111695e19335430326cdf4f880b728be22 | f197be35497ae97c6b90ba17a820ec04e4254c53 | refs/heads/master | 2022-07-09T02:14:12.382165 | 2022-06-07T23:19:15 | 2022-06-07T23:19:15 | 202,816,974 | 3 | 0 | BSD-3-Clause | 2022-03-09T21:07:37 | 2019-08-17T00:37:18 | Python | UTF-8 | Python | false | false | 761 | py | """This contains all of the custom exceptions for the Ghostwriter application."""
class MissingTemplate(Exception):
"""
Exception raised when a report template is missing for a report.
**Attributes**
``message``
Error message to be displayed
"""
def __init__(self, message="No report template selected"):
self.message = message
super().__init__(self.message)
class InvalidFilterValue(Exception):
"""
Exception raised when an invalid value is passed to a report template filter.
**Attributes**
``message``
Error message to be displayed
"""
def __init__(self, message="Invalid value provided to filter"):
self.message = message
super().__init__(self.message)
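# Example usage (illustrative): callers raise these directly, e.g.
# raise MissingTemplate() when a report has no template selected, or
# raise InvalidFilterValue("Invalid value provided to filter: expected int") in a template filter.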
| [
"[email protected]"
]
| |
2c6f44399105c6eaf015fa79e82a8722f392705f | e13c98f36c362717fdf22468b300321802346ef5 | /home/migrations/0005_auto_20161130_1514.py | a8eda78bd3708cdb4cd0d223a5be51a7bbc35b45 | []
| no_license | alexmon1989/libraries_portal | 2415cc49de33459266a9f18ed8bb34ac99d3eb7c | 277081e09f6347c175775337bffba074a35f3b92 | refs/heads/master | 2021-01-23T07:25:53.884795 | 2018-12-25T14:29:29 | 2018-12-25T14:29:29 | 80,501,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-30 13:14
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0004_auto_20161130_1402'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='city',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='home.City', verbose_name='Город'),
),
]
| [
"[email protected]"
]
| |
bbb437e84e29a7a57b6e783426de789e1c3f6ad7 | 4cb288c8b3274b9dc7959ca3bc2d5e4b3bf04618 | /python/ccxt/async_support/bitopro.py | 611f663bd07e3270ce070643f4ab02e0aff6649b | [
"MIT"
]
| permissive | yijixiuxin/ccxt | 7537f73148472efc912f3997040e373cabf2ae0c | d71cd424b9d19b82f2234d8be55dacf311e01a31 | refs/heads/master | 2022-10-01T18:39:29.356725 | 2022-09-20T21:28:02 | 2022-09-20T21:28:02 | 168,174,277 | 0 | 0 | MIT | 2019-01-29T15:05:10 | 2019-01-29T15:05:10 | null | UTF-8 | Python | false | false | 62,980 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
import hashlib
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class bitopro(Exchange):
def describe(self):
return self.deep_extend(super(bitopro, self).describe(), {
'id': 'bitopro',
'name': 'BitoPro',
'countries': ['TW'], # Taiwan
'version': 'v3',
'rateLimit': 100,
'pro': True,
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'cancelAllOrders': True,
'cancelOrder': True,
'cancelOrders': True,
'createOrder': True,
'editOrder': False,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchDepositAddress': False,
'fetchDeposits': True,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchMarginMode': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenInterestHistory': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': False,
'fetchOrderTrades': False,
'fetchPositionMode': False,
'fetchPositions': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': False,
'fetchTrades': True,
'fetchTradingFee': False,
'fetchTradingFees': True,
'fetchTransactionFees': False,
'fetchTransactions': False,
'fetchTransfer': False,
'fetchTransfers': False,
'fetchWithdrawal': True,
'fetchWithdrawals': True,
'setLeverage': False,
'setMarginMode': False,
'transfer': False,
'withdraw': True,
},
'timeframes': {
'1m': '1m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'3h': '3h',
'6h': '6h',
'12h': '12h',
'1d': '1d',
'1w': '1w',
'1M': '1M',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/158227251-3a92a220-9222-453c-9277-977c6677fe71.jpg',
'api': {
'rest': 'https://api.bitopro.com/v3',
},
'www': 'https://www.bitopro.com',
'doc': [
'https://github.com/bitoex/bitopro-offical-api-docs/blob/master/v3-1/rest-1/rest.md',
],
'fees': 'https://www.bitopro.com/fees',
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'api': {
'public': {
'get': [
'order-book/{pair}',
'tickers',
'tickers/{pair}',
'trades/{pair}',
'provisioning/currencies',
'provisioning/trading-pairs',
'provisioning/limitations-and-fees',
'trading-history/{pair}',
],
},
'private': {
'get': [
'accounts/balance',
'orders/history',
'orders/all/{pair}',
'orders/trades/{pair}',
'orders/{pair}/{orderId}',
'wallet/withdraw/{currency}/{serial}',
'wallet/withdraw/{currency}/id/{id}',
'wallet/depositHistory/{currency}',
'wallet/withdrawHistory/{currency}',
],
'post': [
'orders/{pair}',
'orders/batch',
'wallet/withdraw/{currency}',
],
'put': [
'orders',
],
'delete': [
'orders/{pair}/{id}',
'orders/all',
'orders/{pair}',
],
},
},
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'maker': self.parse_number('0.001'),
'taker': self.parse_number('0.002'),
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.002')],
[self.parse_number('3000000'), self.parse_number('0.00194')],
[self.parse_number('5000000'), self.parse_number('0.0015')],
[self.parse_number('30000000'), self.parse_number('0.0014')],
[self.parse_number('300000000'), self.parse_number('0.0013')],
[self.parse_number('550000000'), self.parse_number('0.0012')],
[self.parse_number('1300000000'), self.parse_number('0.0011')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.001')],
[self.parse_number('3000000'), self.parse_number('0.00097')],
[self.parse_number('5000000'), self.parse_number('0.0007')],
[self.parse_number('30000000'), self.parse_number('0.0006')],
[self.parse_number('300000000'), self.parse_number('0.0005')],
[self.parse_number('550000000'), self.parse_number('0.0004')],
[self.parse_number('1300000000'), self.parse_number('0.0003')],
],
},
},
},
'options': {
'networks': {
'ERC20': 'ERC20',
'ETH': 'ERC20',
'TRX': 'TRX',
'TRC20': 'TRX',
},
},
'precisionMode': TICK_SIZE,
'exceptions': {
'exact': {
'Unsupported currency.': BadRequest, # {"error":"Unsupported currency."}
'Unsupported order type': BadRequest, # {"error":"Unsupported order type"}
'Invalid body': BadRequest, # {"error":"Invalid body"}
'Invalid Signature': AuthenticationError, # {"error":"Invalid Signature"}
'Address not in whitelist.': BadRequest,
},
'broad': {
'Invalid amount': InvalidOrder, # {"error":"Invalid amount 0.0000000001, decimal limit is 8."}
'Balance for ': InsufficientFunds, # {"error":"Balance for eth not enough, only has 0, but ordered 0.01."}
'Invalid ': BadRequest, # {"error":"Invalid price -1."}
'Wrong parameter': BadRequest, # {"error":"Wrong parameter: from"}
},
},
'commonCurrencies': {
},
})
async def fetch_currencies(self, params={}):
"""
fetches all available currencies on an exchange
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an associative dictionary of currencies
"""
response = await self.publicGetProvisioningCurrencies(params)
currencies = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "currency":"eth",
# "withdrawFee":"0.007",
# "minWithdraw":"0.001",
# "maxWithdraw":"1000",
# "maxDailyWithdraw":"2000",
# "withdraw":true,
# "deposit":true,
# "depositConfirmation":"12"
# }
# ]
# }
#
result = {}
for i in range(0, len(currencies)):
currency = currencies[i]
currencyId = self.safe_string(currency, 'currency')
code = self.safe_currency_code(currencyId)
deposit = self.safe_value(currency, 'deposit')
withdraw = self.safe_value(currency, 'withdraw')
fee = self.safe_number(currency, 'withdrawFee')
withdrawMin = self.safe_number(currency, 'minWithdraw')
withdrawMax = self.safe_number(currency, 'maxWithdraw')
limits = {
'withdraw': {
'min': withdrawMin,
'max': withdrawMax,
},
'amount': {
'min': None,
'max': None,
},
}
result[code] = {
'id': currencyId,
'code': code,
'info': currency,
'type': None,
'name': None,
'active': deposit and withdraw,
'deposit': deposit,
'withdraw': withdraw,
'fee': fee,
'precision': None,
'limits': limits,
}
return result
async def fetch_markets(self, params={}):
"""
retrieves data on all markets for bitopro
:param dict params: extra parameters specific to the exchange api endpoint
:returns [dict]: an array of objects representing market data
"""
response = await self.publicGetProvisioningTradingPairs()
markets = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "pair":"shib_twd",
# "base":"shib",
# "quote":"twd",
# "basePrecision":"8",
# "quotePrecision":"6",
# "minLimitBaseAmount":"100000",
# "maxLimitBaseAmount":"5500000000",
# "minMarketBuyQuoteAmount":"1000",
# "orderOpenLimit":"200",
# "maintain":false,
# "orderBookQuotePrecision":"6",
# "orderBookQuoteScaleLevel":"5"
# }
# ]
# }
#
result = []
for i in range(0, len(markets)):
market = markets[i]
active = not self.safe_value(market, 'maintain')
id = self.safe_string(market, 'pair')
uppercaseId = id.upper()
baseId = self.safe_string(market, 'base')
quoteId = self.safe_string(market, 'quote')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
limits = {
'amount': {
'min': self.safe_number(market, 'minLimitBaseAmount'),
'max': self.safe_number(market, 'maxLimitBaseAmount'),
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
'leverage': {
'min': None,
'max': None,
},
}
result.append({
'id': id,
'uppercaseId': uppercaseId,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': base,
'quoteId': quote,
'settle': None,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'derivative': False,
'contract': False,
'linear': None,
'inverse': None,
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'limits': limits,
'precision': {
'price': self.parse_number(self.parse_precision(self.safe_string(market, 'quotePrecision'))),
'amount': self.parse_number(self.parse_precision(self.safe_string(market, 'basePrecision'))),
},
'active': active,
'info': market,
})
return result
def parse_ticker(self, ticker, market=None):
#
# {
# "pair":"btc_twd",
# "lastPrice":"1182449.00000000",
# "isBuyer":false,
# "priceChange24hr":"-1.99",
# "volume24hr":"9.13089740",
# "high24hr":"1226097.00000000",
# "low24hr":"1181000.00000000"
# }
#
marketId = self.safe_string(ticker, 'pair')
market = self.safe_market(marketId, market)
symbol = self.safe_string(market, 'symbol')
return self.safe_ticker({
'symbol': symbol,
'timestamp': None,
'datetime': None,
'high': self.safe_string(ticker, 'high24hr'),
'low': self.safe_string(ticker, 'low24hr'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': None,
'open': None,
'close': self.safe_string(ticker, 'lastPrice'),
'last': self.safe_string(ticker, 'lastPrice'),
'previousClose': None,
'change': None,
'percentage': self.safe_string(ticker, 'priceChange24hr'),
'average': None,
'baseVolume': self.safe_string(ticker, 'volume24hr'),
'quoteVolume': None,
'info': ticker,
}, market)
async def fetch_ticker(self, symbol, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.publicGetTickersPair(self.extend(request, params))
ticker = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "pair":"btc_twd",
# "lastPrice":"1182449.00000000",
# "isBuyer":false,
# "priceChange24hr":"-1.99",
# "volume24hr":"9.13089740",
# "high24hr":"1226097.00000000",
# "low24hr":"1181000.00000000"
# }
# }
#
return self.parse_ticker(ticker, market)
async def fetch_tickers(self, symbols=None, params={}):
"""
fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours each market
:param [str]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an array of `ticker structures <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
await self.load_markets()
response = await self.publicGetTickers()
tickers = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "pair":"xrp_twd",
# "lastPrice":"21.26110000",
# "isBuyer":false,
# "priceChange24hr":"-6.53",
# "volume24hr":"102846.47084802",
# "high24hr":"23.24460000",
# "low24hr":"21.13730000"
# }
# ]
# }
#
return self.parse_tickers(tickers, symbols)
async def fetch_order_book(self, symbol, limit=None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/en/latest/manual.html#order-book-structure>` indexed by market symbols
"""
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.publicGetOrderBookPair(self.extend(request, params))
#
# {
# "bids":[
# {
# "price":"1175271",
# "amount":"0.00022804",
# "count":1,
# "total":"0.00022804"
# }
# ],
# "asks":[
# {
# "price":"1176906",
# "amount":"0.0496",
# "count":1,
# "total":"0.0496"
# }
# ]
# }
#
return self.parse_order_book(response, market['symbol'], None, 'bids', 'asks', 'price', 'amount')
def parse_trade(self, trade, market):
#
# fetchTrades
# {
# "timestamp":1644651458,
# "price":"1180785.00000000",
# "amount":"0.00020000",
# "isBuyer":false
# }
#
# fetchMyTrades
# {
# "tradeId":"5685030251",
# "orderId":"9669168142",
# "price":"11821.8",
# "action":"SELL",
# "baseAmount":"0.01",
# "quoteAmount":"118.218",
# "fee":"0.236436",
# "feeSymbol":"BNB",
# "isTaker":true,
# "timestamp":1644905714862,
# "createdTimestamp":1644905714862
# }
#
id = self.safe_string(trade, 'tradeId')
orderId = self.safe_string(trade, 'orderId')
timestamp = None
if id is None:
timestamp = self.safe_timestamp(trade, 'timestamp')
else:
timestamp = self.safe_integer(trade, 'timestamp')
marketId = self.safe_string(trade, 'pair')
market = self.safe_market(marketId, market)
symbol = self.safe_string(market, 'symbol')
price = self.safe_string(trade, 'price')
type = self.safe_string_lower(trade, 'type')
side = self.safe_string_lower(trade, 'action')
if side is None:
isBuyer = self.safe_value(trade, 'isBuyer')
if isBuyer:
side = 'buy'
else:
side = 'sell'
amount = self.safe_string(trade, 'amount')
if amount is None:
amount = self.safe_string(trade, 'baseAmount')
fee = None
feeAmount = self.safe_string(trade, 'fee')
feeSymbol = self.safe_currency_code(self.safe_string(trade, 'feeSymbol'))
if feeAmount is not None:
fee = {
'cost': feeAmount,
'currency': feeSymbol,
'rate': None,
}
isTaker = self.safe_value(trade, 'isTaker')
takerOrMaker = None
if isTaker is not None:
if isTaker:
takerOrMaker = 'taker'
else:
takerOrMaker = 'maker'
return self.safe_trade({
'id': id,
'info': trade,
'order': orderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'takerOrMaker': takerOrMaker,
'type': type,
'side': side,
'price': price,
'amount': amount,
'cost': None,
'fee': fee,
}, market)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
"""
get the list of most recent trades for a particular symbol
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.publicGetTradesPair(self.extend(request, params))
trades = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "timestamp":1644651458,
# "price":"1180785.00000000",
# "amount":"0.00020000",
# "isBuyer":false
# }
# ]
# }
#
return self.parse_trades(trades, market, since, limit)
async def fetch_trading_fees(self, params={}):
"""
fetch the trading fees for multiple markets
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a dictionary of `fee structures <https://docs.ccxt.com/en/latest/manual.html#fee-structure>` indexed by market symbols
"""
await self.load_markets()
response = await self.publicGetProvisioningLimitationsAndFees(params)
tradingFeeRate = self.safe_value(response, 'tradingFeeRate', {})
first = self.safe_value(tradingFeeRate, 0)
#
# {
# "tradingFeeRate":[
# {
# "rank":0,
# "twdVolumeSymbol":"\u003c",
# "twdVolume":"3000000",
# "bitoAmountSymbol":"\u003c",
# "bitoAmount":"7500",
# "makerFee":"0.001",
# "takerFee":"0.002",
# "makerBitoFee":"0.0008",
# "takerBitoFee":"0.0016"
# }
# ],
# "orderFeesAndLimitations":[
# {
# "pair":"BTC/TWD",
# "minimumOrderAmount":"0.0001",
# "minimumOrderAmountBase":"BTC",
# "minimumOrderNumberOfDigits":"0"
# }
# ],
# "restrictionsOfWithdrawalFees":[
# {
# "currency":"TWD",
# "fee":"15",
# "minimumTradingAmount":"100",
# "maximumTradingAmount":"1000000",
# "dailyCumulativeMaximumAmount":"2000000",
# "remarks":"",
# "protocol":""
# }
# ],
# "cryptocurrencyDepositFeeAndConfirmation":[
# {
# "currency":"TWD",
# "generalDepositFees":"0",
# "blockchainConfirmationRequired":""
# }
# ],
# "ttCheckFeesAndLimitationsLevel1":[
# {
# "currency":"TWD",
# "redeemDailyCumulativeMaximumAmount":"",
# "generateMinimumTradingAmount":"",
# "generateMaximumTradingAmount":"",
# "generateDailyCumulativeMaximumAmount":""
# }
# ],
# "ttCheckFeesAndLimitationsLevel2":[
# {
# "currency":"TWD",
# "redeemDailyCumulativeMaximumAmount":"20000000",
# "generateMinimumTradingAmount":"30",
# "generateMaximumTradingAmount":"10000000",
# "generateDailyCumulativeMaximumAmount":"10000000"
# }
# ]
# }
#
result = {}
maker = self.safe_number(first, 'makerFee')
taker = self.safe_number(first, 'takerFee')
for i in range(0, len(self.symbols)):
symbol = self.symbols[i]
result[symbol] = {
'info': first,
'symbol': symbol,
'maker': maker,
'taker': taker,
'percentage': True,
'tierBased': True,
}
return result
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return [
self.safe_integer(ohlcv, 'timestamp'),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number(ohlcv, 'volume'),
]
async def fetch_ohlcv(self, symbol, timeframe='5m', since=None, limit=None, params={}):
"""
fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
:param str symbol: unified symbol of the market to fetch OHLCV data for
:param str timeframe: the length of time each candle represents
:param int|None since: timestamp in ms of the earliest candle to fetch
:param int|None limit: the maximum amount of candles to fetch
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [[int]]: A list of candles ordered as timestamp, open, high, low, close, volume
"""
await self.load_markets()
market = self.market(symbol)
resolution = self.timeframes[timeframe]
request = {
'pair': market['id'],
'resolution': resolution,
}
# we need to have a limit argument because "to" and "from" are required
if limit is None:
limit = 500
timeframeInSeconds = self.parse_timeframe(timeframe)
alignedSince = None
if since is None:
request['to'] = self.seconds()
request['from'] = request['to'] - (limit * timeframeInSeconds)
else:
timeframeInMilliseconds = timeframeInSeconds * 1000
alignedSince = int(math.floor(since / timeframeInMilliseconds)) * timeframeInMilliseconds
request['from'] = int(math.floor(since / 1000))
request['to'] = self.sum(request['from'], limit * timeframeInSeconds)
response = await self.publicGetTradingHistoryPair(self.extend(request, params))
data = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "timestamp":1644581100000,
# "open":"1214737",
# "high":"1215110",
# "low":"1214737",
# "close":"1215110",
# "volume":"0.08423959"
# }
# ]
# }
#
sparse = self.parse_ohlcvs(data, market, timeframe, since, limit)
return self.insert_missing_candles(sparse, timeframeInSeconds, alignedSince, limit)
def insert_missing_candles(self, candles, distance, since, limit):
# the exchange doesn't send zero volume candles so we emulate them instead
# otherwise sending a limit arg leads to unexpected results
length = len(candles)
if length == 0:
return candles
result = []
copyFrom = candles[0]
timestamp = None
if since is None:
timestamp = copyFrom[0]
else:
timestamp = since
i = 0
candleLength = len(candles)
resultLength = 0
while((resultLength < limit) and (i < candleLength)):
candle = candles[i]
if candle[0] == timestamp:
result.append(candle)
i = self.sum(i, 1)
else:
copy = self.array_concat([], copyFrom)
copy[0] = timestamp
# set open, high, low to close
copy[1] = copy[4]
copy[2] = copy[4]
copy[3] = copy[4]
copy[5] = self.parse_number('0')
result.append(copy)
timestamp = self.sum(timestamp, distance * 1000)
resultLength = len(result)
copyFrom = result[resultLength - 1]
return result
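    # Illustrative walk-through (hypothetical values): with distance=60 and
    # candles=[[t0, o, h, l, c, v], [t0 + 120000, ...]], the missing t0+60s slot is emitted as
    # [t0 + 60000, c, c, c, c, 0] -- a flat zero-volume copy of the last row -- keeping the series contiguous.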
def parse_balance(self, response):
#
# [{
# "currency":"twd",
# "amount":"0",
# "available":"0",
# "stake":"0",
# "tradable":true
# }]
#
result = {
'info': response,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
amount = self.safe_string(balance, 'amount')
available = self.safe_string(balance, 'available')
account = {
'free': available,
'total': amount,
}
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
await self.load_markets()
response = await self.privateGetAccountsBalance(params)
balances = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "currency":"twd",
# "amount":"0",
# "available":"0",
# "stake":"0",
# "tradable":true
# }
# ]
# }
#
return self.parse_balance(balances)
def parse_order_status(self, status):
statuses = {
'-1': 'open',
'0': 'open',
'1': 'open',
'2': 'closed',
'3': 'closed',
'4': 'canceled',
}
return self.safe_string(statuses, status, None)
def parse_order(self, order, market=None):
#
# createOrder
# {
# orderId: '2220595581',
# timestamp: '1644896744886',
# action: 'SELL',
# amount: '0.01',
# price: '15000',
# timeInForce: 'GTC'
# }
#
# fetchOrder
# {
# "id":"8777138788",
# "pair":"bnb_twd",
# "price":"16000",
# "avgExecutionPrice":"0",
# "action":"SELL",
# "type":"LIMIT",
# "timestamp":1644899002598,
# "status":4,
# "originalAmount":"0.01",
# "remainingAmount":"0.01",
# "executedAmount":"0",
# "fee":"0",
# "feeSymbol":"twd",
# "bitoFee":"0",
# "total":"0",
# "seq":"BNBTWD548774666",
# "timeInForce":"GTC",
# "createdTimestamp":1644898944074,
# "updatedTimestamp":1644899002598
# }
#
id = self.safe_string_2(order, 'id', 'orderId')
timestamp = self.safe_integer_2(order, 'timestamp', 'createdTimestamp')
side = self.safe_string(order, 'action')
side = side.lower()
amount = self.safe_string_2(order, 'amount', 'originalAmount')
price = self.safe_string(order, 'price')
marketId = self.safe_string(order, 'pair')
market = self.safe_market(marketId, market, '_')
symbol = self.safe_string(market, 'symbol')
orderStatus = self.safe_string(order, 'status')
status = self.parse_order_status(orderStatus)
type = self.safe_string_lower(order, 'type')
average = self.safe_string(order, 'avgExecutionPrice')
filled = self.safe_string(order, 'executedAmount')
remaining = self.safe_string(order, 'remainingAmount')
timeInForce = self.safe_string(order, 'timeInForce')
fee = None
feeAmount = self.safe_string(order, 'fee')
feeSymbol = self.safe_currency_code(self.safe_string(order, 'feeSymbol'))
if Precise.string_gt(feeAmount, '0'):
fee = {
'currency': feeSymbol,
'cost': feeAmount,
}
return self.safe_order({
'id': id,
'clientOrderId': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': self.safe_integer(order, 'updatedTimestamp'),
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'amount': amount,
'cost': None,
'average': average,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': None,
'info': order,
}, market)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
"""
create a trade order
:param str symbol: unified symbol of the market to create an order in
:param str type: 'market' or 'limit'
:param str side: 'buy' or 'sell'
:param float amount: how much of currency you want to trade in units of base currency
        :param float|None price: the price at which the order is to be fulfilled, in units of the quote currency, ignored in market orders
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'type': type,
'pair': market['id'],
'action': side,
'amount': self.amount_to_precision(symbol, amount),
'timestamp': self.milliseconds(),
}
orderType = type.upper()
if orderType == 'LIMIT':
request['price'] = self.price_to_precision(symbol, price)
if orderType == 'STOP_LIMIT':
request['price'] = self.price_to_precision(symbol, price)
stopPrice = self.safe_value_2(params, 'triggerPrice', 'stopPrice')
params = self.omit(params, ['triggerPrice', 'stopPrice'])
if stopPrice is None:
raise InvalidOrder(self.id + ' createOrder() requires a stopPrice parameter for ' + orderType + ' orders')
else:
request['stopPrice'] = self.price_to_precision(symbol, stopPrice)
condition = self.safe_string(params, 'condition')
if condition is None:
raise InvalidOrder(self.id + ' createOrder() requires a condition parameter for ' + orderType + ' orders')
else:
request['condition'] = condition
response = await self.privatePostOrdersPair(self.extend(request, params), params)
#
# {
# orderId: '2220595581',
# timestamp: '1644896744886',
# action: 'SELL',
# amount: '0.01',
# price: '15000',
# timeInForce: 'GTC'
# }
#
return self.parse_order(response, market)
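    # Usage sketch (symbol and price values are illustrative only):
    # order = await exchange.create_order('BTC/TWD', 'limit', 'sell', 0.001, 1200000)
    # STOP_LIMIT orders additionally need 'stopPrice' and 'condition' in params,
    # 'condition' being a comparison string accepted by the BitoPro API.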
async def cancel_order(self, id, symbol=None, params={}):
"""
cancels an open order
:param str id: order id
:param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'id': id,
'pair': market['id'],
}
response = await self.privateDeleteOrdersPairId(self.extend(request, params))
#
# {
# "orderId":"8777138788",
# "action":"SELL",
# "timestamp":1644899002465,
# "price":"16000",
# "amount":"0.01"
# }
#
return self.parse_order(response, market)
async def cancel_orders(self, ids, symbol=None, params={}):
"""
cancel multiple orders
:param [str] ids: order ids
:param str symbol: unified market symbol
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: an list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
id = market['uppercaseId']
request = {}
request[id] = ids
response = await self.privatePutOrders(self.extend(request, params))
#
# {
# "data":{
# "BNB_TWD":[
# "5236347105",
# "359488711"
# ]
# }
# }
#
return response
async def cancel_all_orders(self, symbol=None, params={}):
"""
cancel all open orders
        :param str|None symbol: unified market symbol, only orders in the market of this symbol are cancelled when symbol is not None
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
request = {
# 'pair': market['id'], # optional
}
# privateDeleteOrdersAll or privateDeleteOrdersPair
method = self.safe_string(self.options, 'privateDeleteOrdersPair', 'privateDeleteOrdersAll')
if symbol is not None:
market = self.market(symbol)
request['pair'] = market['id']
method = 'privateDeleteOrdersPair'
response = await getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "BNB_TWD":[
# "9515988421",
# "4639130027"
# ]
# }
# }
#
return result
async def fetch_order(self, id, symbol=None, params={}):
"""
        fetches information on an order made by the user
        :param str id: the order id
        :param str symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'orderId': id,
'pair': market['id'],
}
response = await self.privateGetOrdersPairOrderId(self.extend(request, params))
#
# {
# "id":"8777138788",
# "pair":"bnb_twd",
# "price":"16000",
# "avgExecutionPrice":"0",
# "action":"SELL",
# "type":"LIMIT",
# "timestamp":1644899002598,
# "status":4,
# "originalAmount":"0.01",
# "remainingAmount":"0.01",
# "executedAmount":"0",
# "fee":"0",
# "feeSymbol":"twd",
# "bitoFee":"0",
# "total":"0",
# "seq":"BNBTWD548774666",
# "timeInForce":"GTC",
# "createdTimestamp":1644898944074,
# "updatedTimestamp":1644899002598
# }
#
return self.parse_order(response, market)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetches information on multiple orders made by the user
:param str symbol: unified market symbol of the market orders were made in
:param int|None since: the earliest time in ms to fetch orders for
        :param int|None limit: the maximum number of order structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
# 'startTimestamp': 0,
# 'endTimestamp': 0,
# 'statusKind': '',
# 'orderId': '',
}
if since is not None:
request['startTimestamp'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetOrdersAllPair(self.extend(request, params), params)
orders = self.safe_value(response, 'data')
if orders is None:
orders = []
#
# {
# "data":[
# {
# "id":"2220595581",
# "pair":"bnb_twd",
# "price":"15000",
# "avgExecutionPrice":"0",
# "action":"SELL",
# "type":"LIMIT",
# "createdTimestamp":1644896744886,
# "updatedTimestamp":1644898706236,
# "status":4,
# "originalAmount":"0.01",
# "remainingAmount":"0.01",
# "executedAmount":"0",
# "fee":"0",
# "feeSymbol":"twd",
# "bitoFee":"0",
# "total":"0",
# "seq":"BNBTWD8540871774",
# "timeInForce":"GTC"
# }
# ]
# }
#
return self.parse_orders(orders, market, since, limit)
    async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        """
        fetch all unfilled currently open orders
        """
        request = {
            'statusKind': 'OPEN',
        }
        return await self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetches information on multiple closed orders made by the user
:param str symbol: unified market symbol of the market orders were made in
:param int|None since: the earliest time in ms to fetch orders for
        :param int|None limit: the maximum number of order structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
request = {
'statusKind': 'DONE',
}
return self.fetch_orders(symbol, since, limit, self.extend(request, params))
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all trades made by the user
:param str symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html#trade-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires the symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'pair': market['id'],
}
response = await self.privateGetOrdersTradesPair(self.extend(request, params))
trades = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "tradeId":"5685030251",
# "orderId":"9669168142",
# "price":"11821.8",
# "action":"SELL",
# "baseAmount":"0.01",
# "quoteAmount":"118.218",
# "fee":"0.236436",
# "feeSymbol":"BNB",
# "isTaker":true,
# "timestamp":1644905714862,
# "createdTimestamp":1644905714862
# }
# ]
# }
#
return self.parse_trades(trades, market, since, limit)
def parse_transaction_status(self, status):
states = {
'COMPLETE': 'ok',
'INVALID': 'failed',
'PROCESSING': 'pending',
'WAIT_PROCESS': 'pending',
'FAILED': 'failed',
'EXPIRED': 'failed',
'CANCELLED': 'failed',
'EMAIL_VERIFICATION': 'pending',
'WAIT_CONFIRMATION': 'pending',
}
return self.safe_string(states, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits
# {
# "serial":"20220214X766799",
# "timestamp":"1644833015053",
# "address":"bnb1xml62k5a9dcewgc542fha75fyxdcp0zv8eqfsh",
# "amount":"0.20000000",
# "fee":"0.00000000",
# "total":"0.20000000",
# "status":"COMPLETE",
# "txid":"A3CC4F6828CC752B9F3737F48B5826B9EC2857040CB5141D0CC955F7E53DB6D9",
# "message":"778553959",
# "protocol":"MAIN",
# "id":"2905906537"
# }
#
# fetchWithdrawals or fetchWithdraw
# {
# "serial":"20220215BW14069838",
# "timestamp":"1644907716044",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8.00000000",
# "fee":"2.00000000",
# "total":"10.00000000",
# "status":"COMPLETE",
# "txid":"50bf250c71a582f40cf699fb58bab978437ea9bdf7259ff8072e669aab30c32b",
# "protocol":"TRX",
# "id":"9925310345"
# }
#
# withdraw
# {
# "serial":"20220215BW14069838",
# "currency":"USDT",
# "protocol":"TRX",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8",
# "fee":"2",
# "total":"10"
# }
#
        # the withdraw sample above carries 'currency', so fall back to it when 'coin' is absent
        currencyId = self.safe_string_2(transaction, 'coin', 'currency')
code = self.safe_currency_code(currencyId, currency)
id = self.safe_string(transaction, 'serial')
txId = self.safe_string(transaction, 'txid')
timestamp = self.safe_integer(transaction, 'timestamp')
amount = self.safe_number(transaction, 'total')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'message')
status = self.safe_string(transaction, 'status')
fee = self.safe_number(transaction, 'fee')
return {
'info': transaction,
'id': id,
'txid': txId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'network': None,
'addressFrom': None,
'address': address,
'addressTo': address,
'tagFrom': None,
'tag': tag,
'tagTo': tag,
'type': None,
'amount': amount,
'currency': code,
'status': self.parse_transaction_status(status),
'updated': None,
'fee': {
'currency': code,
'cost': fee,
'rate': None,
},
}
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
"""
fetch all deposits made to an account
:param str code: unified currency code
:param int|None since: the earliest time in ms to fetch deposits for
:param int|None limit: the maximum number of deposits structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchDeposits() requires the code argument')
await self.load_markets()
currency = self.safe_currency(code)
request = {
'currency': currency['id'],
# 'endTimestamp': 0,
# 'id': '',
            # 'statuses': '', # 'PROCESSING,COMPLETE,INVALID,WAIT_PROCESS,CANCELLED,FAILED'
}
if since is not None:
request['startTimestamp'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetWalletDepositHistoryCurrency(self.extend(request, params))
result = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "serial":"20220214X766799",
# "timestamp":"1644833015053",
# "address":"bnb1xml62k5a9dcewgc542fha75fyxdcp0zv8eqfsh",
# "amount":"0.20000000",
# "fee":"0.00000000",
# "total":"0.20000000",
# "status":"COMPLETE",
# "txid":"A3CC4F6828CC752B9F3737F48B5826B9EC2857040CB5141D0CC955F7E53DB6D9",
# "message":"778553959",
# "protocol":"MAIN",
# "id":"2905906537"
# }
# ]
# }
#
return self.parse_transactions(result, currency, since, limit, {'type': 'deposit'})
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
"""
fetch all withdrawals made from an account
:param str code: unified currency code
:param int|None since: the earliest time in ms to fetch withdrawals for
:param int|None limit: the maximum number of withdrawals structures to retrieve
:param dict params: extra parameters specific to the bitopro api endpoint
:returns [dict]: a list of `transaction structures <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchWithdrawals() requires the code argument')
await self.load_markets()
currency = self.safe_currency(code)
request = {
'currency': currency['id'],
# 'endTimestamp': 0,
# 'id': '',
# 'statuses': '', # 'PROCESSING,COMPLETE,EXPIRED,INVALID,WAIT_PROCESS,WAIT_CONFIRMATION,EMAIL_VERIFICATION,CANCELLED'
}
if since is not None:
request['startTimestamp'] = since
if limit is not None:
request['limit'] = limit
response = await self.privateGetWalletWithdrawHistoryCurrency(self.extend(request, params))
result = self.safe_value(response, 'data', [])
#
# {
# "data":[
# {
# "serial":"20220215BW14069838",
# "timestamp":"1644907716044",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8.00000000",
# "fee":"2.00000000",
# "total":"10.00000000",
# "status":"COMPLETE",
# "txid":"50bf250c71a582f40cf699fb58bab978437ea9bdf7259ff8072e669aab30c32b",
# "protocol":"TRX",
# "id":"9925310345"
# }
# ]
# }
#
return self.parse_transactions(result, currency, since, limit, {'type': 'withdrawal'})
async def fetch_withdrawal(self, id, code=None, params={}):
"""
fetch data on a currency withdrawal via the withdrawal id
:param str id: withdrawal id
:param str code: unified currency code of the currency withdrawn, default is None
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
if code is None:
raise ArgumentsRequired(self.id + ' fetchWithdrawal() requires the code argument')
await self.load_markets()
currency = self.safe_currency(code)
request = {
'serial': id,
'currency': currency['id'],
}
response = await self.privateGetWalletWithdrawCurrencySerial(self.extend(request, params))
result = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "serial":"20220215BW14069838",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8.00000000",
# "fee":"2.00000000",
# "total":"10.00000000",
# "status":"COMPLETE",
# "txid":"50bf250c71a582f40cf699fb58bab978437ea9bdf7259ff8072e669aab30c32b",
# "protocol":"TRX",
# "id":"9925310345",
# "timestamp":"1644907716044"
# }
# }
#
return self.parse_transaction(result, currency)
async def withdraw(self, code, amount, address, tag=None, params={}):
"""
make a withdrawal
:param str code: unified currency code
:param float amount: the amount to withdraw
:param str address: the address to withdraw to
:param str|None tag:
:param dict params: extra parameters specific to the bitopro api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
tag, params = self.handle_withdraw_tag_and_params(tag, params)
await self.load_markets()
self.check_address(address)
currency = self.currency(code)
request = {
'currency': currency['id'],
'amount': self.number_to_string(amount),
'address': address,
}
if 'network' in params:
networks = self.safe_value(self.options, 'networks', {})
requestedNetwork = self.safe_string_upper(params, 'network')
params = self.omit(params, ['network'])
networkId = self.safe_string(networks, requestedNetwork)
if networkId is None:
raise ExchangeError(self.id + ' invalid network ' + requestedNetwork)
request['protocol'] = networkId
if tag is not None:
request['message'] = tag
response = await self.privatePostWalletWithdrawCurrency(self.extend(request, params))
result = self.safe_value(response, 'data', {})
#
# {
# "data":{
# "serial":"20220215BW14069838",
# "currency":"USDT",
# "protocol":"TRX",
# "address":"TKrwMaZaGiAvtXCFT41xHuusNcs4LPWS7w",
# "amount":"8",
# "fee":"2",
# "total":"10"
# }
# }
#
return self.parse_transaction(result, currency)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if headers is None:
headers = {}
headers['X-BITOPRO-API'] = 'ccxt'
if api == 'private':
self.check_required_credentials()
if method == 'POST' or method == 'PUT':
body = self.json(params)
payload = self.string_to_base64(body)
signature = self.hmac(payload, self.encode(self.secret), hashlib.sha384)
headers['X-BITOPRO-APIKEY'] = self.apiKey
headers['X-BITOPRO-PAYLOAD'] = payload
headers['X-BITOPRO-SIGNATURE'] = signature
elif method == 'GET' or method == 'DELETE':
if query:
url += '?' + self.urlencode(query)
nonce = self.milliseconds()
rawData = {
'nonce': nonce,
}
rawData = self.json(rawData)
payload = self.string_to_base64(rawData)
signature = self.hmac(payload, self.encode(self.secret), hashlib.sha384)
headers['X-BITOPRO-APIKEY'] = self.apiKey
headers['X-BITOPRO-PAYLOAD'] = payload
headers['X-BITOPRO-SIGNATURE'] = signature
elif api == 'public' and method == 'GET':
if query:
url += '?' + self.urlencode(query)
url = self.urls['api']['rest'] + url
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to the default error handler
if code >= 200 and code < 300:
return
feedback = self.id + ' ' + body
error = self.safe_string(response, 'error')
self.throw_exactly_matched_exception(self.exceptions['exact'], error, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], error, feedback)
raise ExchangeError(feedback) # unknown message
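# --- Illustrative sketch (added for exposition; not part of the exchange
# class). sign() above authenticates private endpoints by base64-encoding
# the JSON body (or a {"nonce": ...} document for GET/DELETE requests) and
# signing that payload with HMAC-SHA384. A standalone equivalent, under the
# assumption that ccxt's hmac() helper returns a hex digest, looks like
# this; the argument values a caller would pass are made-up examples.
def _example_bitopro_signature(secret, raw_json_body):
    import base64
    import hashlib
    import hmac
    payload = base64.b64encode(raw_json_body.encode('utf-8'))
    return hmac.new(secret.encode('utf-8'), payload, hashlib.sha384).hexdigest()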
| [
"[email protected]"
]
| |
af515b669ae560887ad5dc37d62f08810616b261 | ee0cd5c10058a0fc2e29b27139b89706755ca53e | /html/shell/firefox.sh | 18f27d1faa6545cd843bcc49ea8e1b4821958b20 | []
| no_license | meetann/finalcloudproject | 44ac1e36b27cedfc43f6f24035f8477f876709c9 | d8bce6f4fe18d4155900caf0f63eae737ae25309 | refs/heads/master | 2020-06-16T10:07:53.544074 | 2019-07-06T11:57:10 | 2019-07-06T11:57:10 | 195,534,054 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | sh | #!/usr/bin/python
import os
os.system('sshpass -p t ssh -X -o StrictHostKeyChecking=no [email protected] firefox')
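# Equivalent invocation without shell string interpolation (illustration
# only; commented out so the script's behavior is unchanged):
# import subprocess
# subprocess.call(['sshpass', '-p', 't', 'ssh', '-X',
#                  '-o', 'StrictHostKeyChecking=no',
#                  '[email protected]', 'firefox'])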
| [
"[email protected]"
]
| |
c1ecba608b38e7e151190d9428b136119b3a8902 | 3b9b4049a8e7d38b49e07bb752780b2f1d792851 | /src/third_party/skia/gyp/icu.gyp | 4a985032c26d61b2145ef092b2b838626d4a11de | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-public-domain"
]
| permissive | webosce/chromium53 | f8e745e91363586aee9620c609aacf15b3261540 | 9171447efcf0bb393d41d1dc877c7c13c46d8e38 | refs/heads/webosce | 2020-03-26T23:08:14.416858 | 2018-08-23T08:35:17 | 2018-09-20T14:25:18 | 145,513,343 | 0 | 2 | Apache-2.0 | 2019-08-21T22:44:55 | 2018-08-21T05:52:31 | null | UTF-8 | Python | false | false | 3,713 | gyp | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'common_variables.gypi',
],
'variables': {
'component%': 'static_library',
'icu_directory': '../third_party/externals/icu'
},
'targets': [
{
'target_name': 'icuuc',
'type': '<(component)',
'sources': [
'<!@(python find.py ../third_party/externals/icu/source/common "*.c*")'
],
'defines': [
'U_COMMON_IMPLEMENTATION',
'U_HIDE_DATA_SYMBOL',
'U_USING_ICU_NAMESPACE=0',
'HAVE_DLOPEN=0',
'UCONFIG_NO_NON_HTML5_CONVERSION=1',
],
'include_dirs': [ '<(icu_directory)/source/common', ],
'direct_dependent_settings': {
'defines': [
'U_USING_ICU_NAMESPACE=0',
'U_ENABLE_DYLOAD=0',
],
'include_dirs': [ '<(icu_directory)/source/common', ],
'conditions': [
[
'component=="static_library"', {
'defines': [
'U_STATIC_IMPLEMENTATION',
],
}
],
],
},
'cflags': [ '-w' ],
'cflags_cc': [ '-frtti', ],
'conditions': [
[
'component=="static_library"', {
'defines': [ 'U_STATIC_IMPLEMENTATION', ],
}
],
[
'OS == "win"', {
'sources': [
'<(icu_directory)/source/stubdata/stubdata.c',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': [ '<(icu_directory)/windows/icudt.dll', ],
},
],
'msvs_disabled_warnings': [4005, 4068, 4244, 4355, 4996, 4267],
'msvs_settings': {
'VCCLCompilerTool': {
'AdditionalOptions': [ '/EHsc', ],
},
},
'configurations': {
'Debug': {
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeTypeInfo': 'true', # /GR
},
},
},
'Release': {
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeTypeInfo': 'true', # /GR
},
},
},
},
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'advapi32.lib',
],
},
},
},
}
],
[
'OS == "win" and skia_clang_build', {
'msvs_settings': {
'VCCLCompilerTool': {
'AdditionalOptions': [
# See http://bugs.icu-project.org/trac/ticket/11122
'-Wno-inline-new-delete',
'-Wno-implicit-exception-spec-mismatch',
],
},
},
}
],
[
'skia_os == "android"', {
'sources': [ '<(icu_directory)/android/icudtl_dat.S', ],
}
],
[
'skia_os == "linux"', {
'sources': [ '<(icu_directory)/linux/icudtl_dat.S', ],
}
],
[
'skia_os == "mac"', {
'sources': [ '<(icu_directory)/mac/icudtl_dat.S', ],
'xcode_settings': {
'GCC_ENABLE_CPP_RTTI': 'YES', # -frtti
'WARNING_CFLAGS': [ '-w' ],
},
}
],
], # conditions
},
], # targets
}
| [
"[email protected]"
]
| |
1e1d5ccfdb2caa614c32a09ee07729393624758c | 4c672231bd8b7c23bd5773ef990404cc3146712a | /shipmaster/server/celery.py | 8e24f72855c7e156d14e3e37290140aeabcf16b0 | [
"BSD-3-Clause"
]
| permissive | AzureCloudMonk/shipmaster | b0e82f93308ecc829e6f6b3cb3156f11dcfbadd4 | cf596be7ea689c26c4bf47acb67dfd15169d3c46 | refs/heads/master | 2020-11-30T01:51:32.010852 | 2018-03-03T21:47:17 | 2018-03-03T21:47:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'shipmaster.server.settings')
from celery import Celery
from django.conf import settings
app = Celery('shipmaster.server')
app.config_from_object(settings)
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
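# Example task (illustrative addition, not part of the original module): any
# function decorated with @app.task inside an installed app's tasks.py is
# registered by the autodiscover_tasks() call above; bind=True exposes the
# task instance so its request metadata can be inspected.
@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))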
| [
"[email protected]"
]
| |
60a9319cb5e51a72ea6172acb56753d27d908782 | 9aa52f7e5902ea8f4a2810809218d9631446345d | /backend/course/api/v1/serializers.py | 94b376e43c63bba2216fc46a5939adf50d3f51d9 | []
| no_license | crowdbotics-apps/merchandising-plays-21542 | e662e42b8766a2fc24d6e0ab926580de0b580461 | c0298b28a45a617b88984d074af4a69f4ea00700 | refs/heads/master | 2022-12-29T10:31:41.304017 | 2020-10-15T18:39:00 | 2020-10-15T18:39:00 | 304,412,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,622 | py | from rest_framework import serializers
from course.models import (
Recording,
Event,
Subscription,
Course,
Group,
Module,
PaymentMethod,
SubscriptionType,
Enrollment,
Lesson,
Category,
)
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
fields = "__all__"
class SubscriptionTypeSerializer(serializers.ModelSerializer):
class Meta:
model = SubscriptionType
fields = "__all__"
class RecordingSerializer(serializers.ModelSerializer):
class Meta:
model = Recording
fields = "__all__"
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = "__all__"
class EventSerializer(serializers.ModelSerializer):
class Meta:
model = Event
fields = "__all__"
class CourseSerializer(serializers.ModelSerializer):
class Meta:
model = Course
fields = "__all__"
class ModuleSerializer(serializers.ModelSerializer):
class Meta:
model = Module
fields = "__all__"
class LessonSerializer(serializers.ModelSerializer):
class Meta:
model = Lesson
fields = "__all__"
class PaymentMethodSerializer(serializers.ModelSerializer):
class Meta:
model = PaymentMethod
fields = "__all__"
class EnrollmentSerializer(serializers.ModelSerializer):
class Meta:
model = Enrollment
fields = "__all__"
class SubscriptionSerializer(serializers.ModelSerializer):
class Meta:
model = Subscription
fields = "__all__"
| [
"[email protected]"
]
| |
dd4c5f0cf3d049124539bf2e96145945474a60c3 | 389569a591284a2adcdc38046114e7b1038afd94 | /python-script/trax/main.py | 9c8e8741e2a3e128b672f4596ae761a0f61aea50 | []
| no_license | xytysingle/AnnotationTool | b797daf2fd472f602341b16f24fb1ed9b702aef1 | a217d4376ceee739e0d8c43515c403133982e86e | refs/heads/master | 2020-04-11T18:16:10.438919 | 2019-07-31T10:21:18 | 2019-07-31T10:21:18 | 161,992,153 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,827 | py | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.command import Command
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from selenium.webdriver import Firefox
from selenium.webdriver.support.events import EventFiringWebDriver, AbstractEventListener
from selenium.webdriver.common.action_chains import ActionChains
import time
import requests
import json
import os
import math
from urllib import request
def login():
global lastPageScenceId
getLogin_url = 'https://services.trax-cloud.cn'
username = wait.until(EC.presence_of_element_located((By.NAME, "username")))
# username = browser.find_element_by_name("username")
# submit_next = browser.find_element_by_name("login")
submit_next = wait.until(EC.presence_of_element_located((By.NAME, "login")))
username.clear()
username.send_keys("[email protected]")
time.sleep(1)
submit_next.click()
# password_input = browser.find_element_by_name("password")
# submit_login = browser.find_element_by_name("login")
password_input = wait.until(EC.presence_of_element_located((By.NAME, "password")))
submit_login = wait.until(EC.presence_of_element_located((By.NAME, "login")))
password_input.clear()
password_input.send_keys("Trax12345")
time.sleep(1)
submit_login.click()
Explorer = wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/ui-view/div/ui-view/div/div/div[1]/div[2]/a")))
Explorer.click()
# Explorer = browser.find_element_by_xpath("/html/body/ui-view/div/ui-view/div/div/div[1]/div[2]/a").click()
# /html/body/ui-view/div/ui-view/div/div/div[1]/div[2]/a
Scenes = browser.find_element_by_xpath("/html/body/ui-view/div/ui-view/ui-view/div/div[2]/div[2]").click()
DateRange = wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/ui-view/div/ui-view/ui-view/ui-view/div/div[1]/div/ui-view/div/div/trax-date-picker/div/div"))).click()
# https://services.trax-cloud.cn/trax-one/api/projects/swirecn/explore/scenes/all/?limit=200&from=2019-02-01&to=2019-02-02&direction=first
FromDate = wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/ui-view/div/ui-view/ui-view/ui-view/div/div[1]/div/ui-view/div/div/trax-date-picker/div/div[2]/div[1]/input[1]")))
ToDate = wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/ui-view/div/ui-view/ui-view/ui-view/div/div[1]/div/ui-view/div/div/trax-date-picker/div/div[2]/div[1]/input[2]")))
# '12 Mar, 2019' '14 Mar, 2019' Mar Feb Jan
FromDate.clear()
FromDate.send_keys("13 Mar, 2019")
ToDate.clear()
ToDate.send_keys("13 Mar, 2019")
time.sleep(1)
Apply_btn = wait.until(EC.presence_of_element_located((By.XPATH, "/html/body/ui-view/div/ui-view/ui-view/ui-view/div/div[1]/div/ui-view/div/div/trax-date-picker/div/div[2]/div[6]/button[2]")))
Apply_btn.click()
#
# page = browser.page_source
    # Enter the scenes view
time.sleep(5)
# getFirstScencesList()
getNextScencesList(lastPageScenceId)
def saveCookies():
cookies = browser.get_cookies()
jsonCookies = json.dumps(cookies)
with open('cookies.json', 'w') as f:
f.write(jsonCookies)
print(cookies)
# Load the cookies saved by saveCookies() and join them into a single
# Cookie header string ("name1=value1; name2=value2; ...").
def getCookies():
with open('cookies.json', 'r', encoding='utf-8') as f:
listCookies = json.loads(f.read())
cookie = [item["name"] + "=" + item["value"] for item in listCookies]
cookiestr = '; '.join(item for item in cookie)
return cookiestr
# Read a value from the browser's localStorage. Note that the key must
# already be wrapped in quotes (e.g. "'authentication_token'") because it
# is spliced verbatim into the JavaScript snippet below.
def getLocalStorage(key):
# getItem = 'localStorage.getItem("temp2")'
print(key)
res = browser.execute_script("return localStorage.getItem({})".format(key))
return res
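# Illustrative helper (added for exposition; the functions below keep their
# original inline header construction): the three private requests all build
# the same authentication headers, which could be centralized like this.
def build_auth_headers():
    return {
        "authentication_token": getLocalStorage("'authentication_token'"),
        "authorization_token": getLocalStorage("'authorization_token'"),
        "refresh_token": getLocalStorage("'refresh_token'"),
        "cookie": getCookies(),
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36",
    }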
def getLabelResults(index):
    print('Requesting scene annotations...')
base_url = 'https://services.trax-cloud.cn/trax-one/api/projects/swirecn/scene/' + str(index)
headers = {
"authentication_token": getLocalStorage("'authentication_token'"),
"authorization_token": getLocalStorage("'authorization_token'"),
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36",
"refresh_token": getLocalStorage("'refresh_token'"),
"cookie": getCookies()
}
try:
rec_response = requests.get(base_url, headers=headers).text
rec_response = json.loads(rec_response)
scence_path = date_path + "/{}".format(str(index))
mkdir(scence_path)
# saveResults(scence_path + "/{}".format(str(index)), rec_response)
saveResultsByJson(scence_path + "/{}".format(str(index)), rec_response)
imagesList = rec_response["probeImages"]
for img in imagesList:
img_url = 'https://services.traxretail.com/images/traxus' + img["probe_image_path"].partition('http://traxus.s3.amazonaws.com')[2] + '/original'
img_name = img["probe_image_path"].split('/')[-1]
try:
saveimage(img_url, scence_path + "/{}.jpeg".format(img_name))
except Exception as e:
                print("Failed to save image:", e)
        print('Scene crawled successfully...')
    except Exception as e:
        print("Scene crawl failed:", e)
time.sleep(2)
# print(rec_response)
def goToNextPage():
# span.xp-navigate-description.trax-tst-pagination-paging-summary
page_location = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'span.xp-navigate-description.trax-tst-pagination-paging-summary')))
print('page_location:', page_location.text)
wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'span[title="next"]'))).click()
    # Enter the scene viewer
time.sleep(5)
wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'a[href^="trax-one/swirecn/explore/scene/"]'))).click()
def getNextSence():
scence_location = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'body > ui-view > div > ui-view > ui-view > div > div.is-subheader.is-viewer-subheader.sp-flex-shrink > span.is-subheader-center > ui-view > div > siblings-navigator > span > span > span.items-list.trax-tst-viewer-serializationText')))
wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'body > ui-view > div > ui-view > ui-view > div > div.is-subheader.is-viewer-subheader.sp-flex-shrink > span.is-subheader-center > ui-view > div > siblings-navigator > span > span > span.trax-icons.trax-icons-page-back.rotated-to-down-arrow.trax-tst-viewer-next'))).click()
scence_index = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, 'body > ui-view > div > ui-view > ui-view > div > div.is-subheader.is-viewer-subheader.sp-flex-shrink > span.is-subheader-left > ui-view > div > span > span:nth-child(4)')))
print('scence_location:', scence_location.text, 'scence_index:', scence_index.text)
def getFirstScencesList():
global pageNumber
global totalPages
    print('Requesting scene list...')
base_url = 'https://services.trax-cloud.cn/trax-one/api/projects/swirecn/explore/scenes/all/'
headers = {
"authentication_token": getLocalStorage("'authentication_token'"),
"authorization_token": getLocalStorage("'authorization_token'"),
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36",
"refresh_token": getLocalStorage("'refresh_token'"),
"cookie": getCookies()
}
request_data = {
"limit": 200,
"from": from_date,
"to": to_date,
"direction": 'first',
# "last_known_primary_key": last_known_primary_key
}
scencesList_res = requests.get(url=base_url, headers=headers, params=request_data).text
scencesList_res = json.loads(scencesList_res)
saveResultsByJson(date_path +'/' + date + '_' + str(pageNumber + 1), scencesList_res)
print(scencesList_res)
totalItemsCount = scencesList_res["totalItems"]["total_items"]
items = scencesList_res["items"]
print("totalItemsCount:",totalItemsCount, "items:", items)
pageNumber += 1
totalPages = math.ceil(int(totalItemsCount) / 200)
    count = len(items)
    for i in range(count):
        index = items[i]["scene_id"]
        print("Crawling item {} on page {} ({} pages, {} items in total)".format(i + 1, pageNumber, totalPages, totalItemsCount))
        try:
            getLabelResults(index)
            # Use the real item count instead of a hard-coded 200 so a short
            # final page cannot raise an IndexError.
            if i == count - 1 and pageNumber < totalPages:
                getNextScencesList(index)
        except Exception as e:
            print('Failed to fetch the next scene:', e)
def getNextScencesList(last_known_primary_key):
global pageNumber
global totalPages
    print('Requesting scene list...')
base_url = 'https://services.trax-cloud.cn/trax-one/api/projects/swirecn/explore/scenes/all/'
headers = {
"authentication_token": getLocalStorage("'authentication_token'"),
"authorization_token": getLocalStorage("'authorization_token'"),
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36",
"refresh_token": getLocalStorage("'refresh_token'"),
"cookie": getCookies()
}
request_data = {
"limit": 200,
"from": from_date,
"to": to_date,
"direction": 'next',
"last_known_primary_key": last_known_primary_key
}
scencesList_res = requests.get(url=base_url, headers=headers, params=request_data).text
scencesList_res = json.loads(scencesList_res)
# print(scencesList_res)
# saveResultsByJson(str(2019), scencesList_res)
saveResultsByJson(date_path + '/' + date + '_' + str(pageNumber + 1), scencesList_res)
print(scencesList_res)
totalItemsCount = scencesList_res["totalItems"]["total_items"]
items = scencesList_res["items"]
print("totalItemsCount:", totalItemsCount, "items:", items)
pageNumber += 1
totalPages = math.ceil(int(totalItemsCount) / 200)
    count = len(items)
    for i in range(count):
        index = items[i]["scene_id"]
        print("Crawling item {} on page {} ({} pages, {} items in total)".format(i + 1, pageNumber, totalPages, totalItemsCount))
        try:
            getLabelResults(index)
            # Use the real item count instead of a hard-coded 200 so a short
            # final page cannot raise an IndexError.
            if i == count - 1 and pageNumber < totalPages:
                getNextScencesList(index)
        except Exception as e:
            print('Failed to fetch the next scene:', e)
def saveimage(imgUrl, imgPath):
request.urlretrieve(imgUrl, imgPath)
def saveResults(filename, data):
with open("{}.json".format(filename), "w", encoding='utf-8') as f:
f.write(data)
def saveResultsByJson(filename, data):
with open("{}.json".format(filename), 'w', encoding='utf-8') as json_file:
json.dump(data, json_file, ensure_ascii=False)
def mkdir(path):
path = path.strip()
path = path.rstrip("\\")
isExists = os.path.exists(path)
if not isExists:
os.makedirs(path)
        print("{} created".format(path))
return True
else:
        print("{} already exists".format(path))
return False
if __name__ == "__main__":
from_date = '2019-03-13'
to_date = '2019-03-13'
date = from_date.replace('-', '')
date_path = "./scence/{}".format(date)
lastPageScenceId = 9237427
pageNumber = 5
totalPages = 0
mkdir(date_path)
# chromeOptions = webdriver.ChromeOptions()
# chromeOptions.add_argument('--proxy-server=https://210.16.189.230:16816')
# browser = webdriver.Chrome(chrome_options=chromeOptions)
browser = webdriver.Chrome()
wait = WebDriverWait(browser, 10)
login()
| [
"[email protected]"
]
| |
462ff12ed72a87b6f46032cc0eeb6fd1d11f6baf | af669dbef653dd69474f4c0836582bf14262c80f | /price-test/frame/lib/commonlib/configure/configunit.py | d59369edd113378ff64e2167f6f76406ff180d06 | []
| no_license | siki320/fishtest | 7a3f91639d8d4cee624adc1d4d05563611b435e9 | 7c3f024192e1c48214b53bc45105bdf9e746a013 | refs/heads/master | 2021-01-19T21:58:36.807126 | 2017-04-19T09:56:37 | 2017-04-19T09:56:37 | 88,729,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 745 | py | #!/usr/bin/env python
# -*- coding: GB18030 -*-
'''
Created on 2012-3-10
@author: tongdangdang
'''
class ConfigUnit(object):
'''
@author: tongdangdang
    @summary: a single key/value unit of a UB conf configuration
'''
def __init__(self,key,value,father,note = ""):
self.key = key
self.value = value
self.level = -1
self.father = father
self.note = note
'''
    @summary: user-defined string conversion; a unit renders as its value
'''
def __str__(self):
return self.value
def __getitem__(self, key):
return self.value
#def __delitem__(self, key):
# if isinstance(self.father, configarray.ConfigArray):
# pass
# elif isinstance(self.father, configarray.ConfigArray):
# pass
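if __name__ == '__main__':
    # Illustrative usage (added as an example): a unit prints as its value,
    # and item access returns the value regardless of the key passed in.
    unit = ConfigUnit('port', '8080', None)
    print(unit)         # -> 8080
    print(unit['any'])  # -> 8080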
| [
"[email protected]"
]
| |
caf4b456838e4066cfe9191405c63b482a8eda64 | 036f11eaae82a9c7838580d141375ab3c03f739a | /unsupervised-semantic-audio-embeddings/main.py | 53c20702308eebe08c34ffa43db3737d513dda3c | []
| no_license | silvadirceu/experiments | 8b6f1739a51803f73da89c137d07871505ddf712 | 2390392726a43aa5587e02d8ee2a423cf281463c | refs/heads/master | 2022-02-19T11:18:52.485742 | 2019-09-26T14:43:51 | 2019-09-26T14:43:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,422 | py | from __future__ import division, print_function
import zounds
import argparse
from data import dataset
from deformations import make_pitch_shift, make_time_stretch, additive_noise
from training_data import TripletSampler
from train import Trainer
from network import EmbeddingNetwork
import torch
import numpy as np
import cPickle as pickle
from search import TreeSearch
# resample all audio in our dataset to this rate
samplerate = zounds.SR11025()
# produce a base class for our audio processing graph, which will do some
# basic preprocessing and transcoding of the signal
BaseModel = zounds.resampled(resample_to=samplerate, store_resampled=True)
# the length in samples of the audio segments we'll be creating embeddings for
window_size_samples = 8192
slice_duration = samplerate.frequency * window_size_samples
# segments occurring within ten seconds of our anchor will be considered
# semantically similar
temporal_proximity = zounds.Seconds(10)
# a collection of the audio deformations we'll use during training. Temporal
# proximity is included implicitly
deformations = [
make_time_stretch(samplerate, window_size_samples),
make_pitch_shift(samplerate),
additive_noise
]
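# Sketch of the training objective (exposition only; the real loss lives in
# train.Trainer, which is not shown here). Given cosine distances d_ap
# (anchor-to-positive) and d_an (anchor-to-negative), the triplet margin
# loss is a hinge that only vanishes once the negative is at least `margin`
# farther from the anchor than the positive is.
def triplet_margin_loss_sketch(d_ap, d_an, margin=0.25):
    return max(0.0, d_ap - d_an + margin)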
@zounds.simple_lmdb_settings(
'/hdd/sounddb2', map_size=1e11, user_supplied_id=True)
class Sound(BaseModel):
"""
An audio processing graph, that will resample each audio file to 11025hz
and store the results in an LMDB database
"""
short_windowed = zounds.ArrayWithUnitsFeature(
zounds.SlidingWindow,
wscheme=zounds.HalfLapped(),
wfunc=zounds.OggVorbisWindowingFunc(),
needs=BaseModel.resampled)
stft = zounds.ArrayWithUnitsFeature(
zounds.FFT,
needs=short_windowed)
def train(network, batch_size, device, checkpoint, weights_file_path):
"""
Train the model indefinitely
"""
sampler = TripletSampler(
Sound, slice_duration, deformations, temporal_proximity)
trainer = Trainer(
network=network,
triplet_sampler=sampler,
learning_rate=1e-4,
batch_size=batch_size,
triplet_loss_margin=0.25).to(device)
for batch_num, error in enumerate(trainer.train()):
print('Batch: {batch_num}, Error: {error}'.format(**locals()))
if batch_num % checkpoint == 0:
torch.save(network.state_dict(), weights_file_path)
def compute_all_embeddings(network):
"""
A generator that will compute embeddings for every non-overlapping segment
of duration window_size_samples in the database
"""
for snd in Sound:
windowed = snd.resampled.sliding_window(
samplerate * window_size_samples).astype(np.float32)
arr = zounds.learn.apply_network(
network, windowed, chunksize=64)
ts = zounds.ArrayWithUnits(
arr, [windowed.dimensions[0], zounds.IdentityDimension()])
print(snd._id)
yield snd._id, ts
def build_search_index(network, search_file_path, n_trees=32):
"""
Build both a brute force search index, as well as an index that uses a tree
of random hyperplane splits
"""
try:
with open(search_file_path, 'rb') as f:
search = pickle.load(f)
except IOError:
search = zounds.BruteForceSearch(
compute_all_embeddings(network), distance_metric='cosine')
with open(search_file_path, 'wb') as f:
pickle.dump(search, f, pickle.HIGHEST_PROTOCOL)
print('building tree...')
tree_search = TreeSearch(search, n_trees=n_trees)
return search, tree_search
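# Exposition only: the idea behind TreeSearch (implemented in search.py, not
# shown here) is to recursively partition the embedding space with random
# hyperplanes so that queries visit only a small fraction of the index. One
# split, assuming a dense 2-d array of embeddings, looks like this:
def _random_hyperplane_split_sketch(embeddings):
    normal = np.random.normal(size=embeddings.shape[1])
    mask = embeddings.dot(normal) >= 0
    return embeddings[mask], embeddings[~mask]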
def visualize_embeddings(network, search_file_path):
from matplotlib import cm
from sklearn.manifold import TSNE
from matplotlib import pyplot as plt
# map labels/categories to some known examples of sounds that fall into
# that category
class_to_id = {
'piano': {'AOC11B', 'CHOPINBallades-NEWTRANSFER'},
'pop': {'02.LostInTheShadowsLouGramm', '08Scandalous'},
'jazz': {'Free_20s_Jazz_Collection'},
'hip-hop': {'LucaBrasi2', 'Chance_The_Rapper_-_Coloring_Book'},
'speech': {
'Greatest_Speeches_of_the_20th_Century', 'The_Speeches-8291'},
'nintendo': {
'CastlevaniaNESMusicStage10WalkingOnTheEdge',
'SuperMarioBros3NESMusicWorldMap6'}
}
# map a color to each category
color_map = cm.Paired
color_index = dict(
(key, color_map(x)) for x, key
in zip(np.linspace(0, 1, len(class_to_id)), class_to_id.iterkeys()))
# map sound ids to their labels
id_index = dict()
for snd in Sound:
for label, _ids in class_to_id.iteritems():
for _id in _ids:
if _id in snd._id:
id_index[snd._id] = label
# reduce the entire database of computed embeddings to just those with the
# ids we care about
search, tree_search = build_search_index(
network, search_file_path, n_trees=1)
# build up two sequences, one that contains the indices we're interested in
# and the other that contains the color that should be assigned to that
# data point
indices = []
labels = []
for index, pair in enumerate(search._ids):
_id, _ = pair
try:
label = id_index[_id]
labels.append(label)
indices.append(index)
except KeyError:
continue
indices = np.array(indices)
labels = np.array(labels)
# shuffle indices and take the first N
new_indices = np.random.permutation(len(indices))[:int(2e4)]
indices = indices[new_indices]
labels = labels[new_indices]
embeddings = search.index[indices]
print(embeddings.shape)
# dist = cosine_distances(embeddings, embeddings)
# print(dist.shape)
model = TSNE(metric='cosine')
points = model.fit_transform(embeddings)
print(points.shape)
plt.figure(figsize=(15, 15))
for label in class_to_id.iterkeys():
label_indices = np.where(labels == label)[0]
p = points[label_indices]
color = color_index[label]
plt.scatter(p[:, 0], p[:, 1], c=[color], label=label, edgecolors='none')
plt.xticks([])
plt.yticks([])
plt.legend()
plt.savefig('t-SNE.png')
def compare_search_indices(network, search_file_path):
search, tree_search = build_search_index(
network, search_file_path, n_trees=64)
tree_search.compare_and_plot(
n_trees=[1, 2, 4, 8, 16, 32, 64],
n_iterations=50,
n_results=50)
def visualize_tree(network, search_file_path):
search, tree_search = build_search_index(
network, search_file_path, n_trees=1)
tree_search.visualize_tree()
def demo_negative_mining(network, batch_size, device):
from matplotlib import pyplot as plt, gridspec
from itertools import product
sampler = TripletSampler(
Sound, slice_duration, deformations, temporal_proximity)
trainer = Trainer(
network=network,
triplet_sampler=sampler,
learning_rate=1e-4,
batch_size=batch_size,
triplet_loss_margin=0.25).to(device)
spec = gridspec.GridSpec(4, 4, wspace=0.25, hspace=0.25)
fig = plt.figure(figsize=(15, 15))
for x, y in product(xrange(4), xrange(4)):
anchor_to_positive, anchor_to_negative, mined_anchor_to_negative = \
trainer.negative_mining_demo()
ax = plt.subplot(spec[x, y])
ax.plot(anchor_to_positive, label='anchor-to-positive')
ax.plot(anchor_to_negative, label='anchor-to-negative')
ax.plot(mined_anchor_to_negative, label='mined-anchor-to-negative')
ax.set_xticks([])
ax.set_ylim(0, 1.0)
plt.legend(bbox_to_anchor=(1, 0), loc="lower right")
plt.savefig('negative_mining.png', format='png')
fig.clf()
if __name__ == '__main__':
parser = argparse.ArgumentParser(parents=[
zounds.ui.AppSettings()
])
parser.add_argument(
'--ingest',
help='should data be ingested',
action='store_true')
parser.add_argument(
'--batch-size',
help='Batch size to use when training',
type=int)
parser.add_argument(
'--checkpoint',
help='save network weights every N batches',
type=int)
parser.add_argument(
'--weights-file-path',
help='the name of the file where weights should be saved')
parser.add_argument(
'--search',
help='test the search',
action='store_true')
parser.add_argument(
'--search-file-path',
help='the path where a pre-built search should be stored',
required=False)
parser.add_argument(
'--demo-negative-mining',
help='run a demo of within-batch semi-hard negative mining',
action='store_true')
parser.add_argument(
'--compare-search-indices',
help='run a comparison of search indices',
action='store_true')
parser.add_argument(
'--visualize-tree',
help='produce a visualization of one hyperplane tree',
action='store_true')
parser.add_argument(
'--visualize-embeddings',
        help='produce a 2d visualization of the embeddings using t-SNE',
action='store_true'
)
args = parser.parse_args()
if args.ingest:
zounds.ingest(dataset, Sound, multi_threaded=True)
network, device = EmbeddingNetwork.load_network(args.weights_file_path)
if args.search:
search, tree_search = build_search_index(
network=network,
search_file_path=args.search_file_path)
elif args.demo_negative_mining:
demo_negative_mining(network, args.batch_size, device)
elif args.compare_search_indices:
compare_search_indices(network, args.search_file_path)
elif args.visualize_tree:
visualize_tree(network, args.search_file_path)
elif args.visualize_embeddings:
visualize_embeddings(network, args.search_file_path)
else:
train(
network=network,
batch_size=args.batch_size,
device=device,
checkpoint=args.checkpoint,
weights_file_path=args.weights_file_path)
app = zounds.ZoundsApp(
model=Sound,
visualization_feature=Sound.stft,
audio_feature=Sound.ogg,
globals=globals(),
locals=locals())
app.start(port=args.port)
| [
"[email protected]"
]
| |
8bc1f3af1ca811d884a225dbd76851c0ad13c46a | 1da15a0ec8eb771d4584b3997d44d2af23d53484 | /D3/1220.Magnetic.py | 2da7b7c711faaafaf1586b556cbc79aeea42fe62 | []
| no_license | cdh3261/Algorithm_Problems | 1e9ad0310490ffe5396f8cef3205885d62ebefb7 | d9ad791e9a0bcdd1c13b8e18fa993b784a53b064 | refs/heads/master | 2020-08-29T07:27:04.331917 | 2020-03-06T11:33:57 | 2020-03-06T11:33:57 | 217,966,844 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | ####### N극 #######
####### S극 #######
for t in range(1, 11):
n = int(input())
arr = [list(map(int, input().split())) for i in range(n)]
    # Collect the non-zero magnets of each column, top to bottom.
    col = []
for i in range(n):
a = []
for j in range(n):
if arr[j][i] != 0:
a.append(arr[j][i])
col.append(a)
    cnt = 0
    # A deadlock occurs wherever a 2 directly follows a 1 in the
    # compacted column.
    for i in range(n):
        for j in range(1, len(col[i])):
            if col[i][j] == 2 and col[i][j - 1] != 2:
                cnt += 1
print(f'#{t} {cnt}') | [
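# Equivalent single-pass formulation (sketch, for comparison with the
# solution above): walk each column top-down and count a deadlock whenever
# a 2 appears while an unmatched 1 is somewhere above it.
def count_deadlocks(grid, size):
    total = 0
    for c in range(size):
        one_above = False
        for r in range(size):
            if grid[r][c] == 1:
                one_above = True
            elif grid[r][c] == 2 and one_above:
                total += 1
                one_above = False
    return total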
"[email protected]"
]
|