prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>basespace.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# filename: basespace.py
#
# Copyright (c) 2015 Bryan Briney
# License: The MIT license (http://opensource.org/licenses/MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,<|fim▁hole|>#
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
from datetime import datetime
import json
import os
import platform
from shutil import copyfile
import sys
import time
from BaseSpacePy.api.BaseSpaceAPI import BaseSpaceAPI
from BaseSpacePy.model.QueryParameters import QueryParameters as qp
from abutils.utils import log
from abutils.utils.pipeline import make_dir
from abutils.utils.progbar import progress_bar
if sys.version_info[0] > 2:
raw_input = input
logger = log.get_logger('basespace')
class BaseSpace(object):
    """Thin wrapper around the BaseSpace API for listing projects and
    downloading sequencing data.

    Args:
        project_id (str): ID of the BaseSpace project to use. Optional.
        project_name (str): name of the BaseSpace project to use. Optional.
        get_all_projects (bool): currently unused; retained for backward
            compatibility with existing callers.

    If neither ``project_id`` nor ``project_name`` is supplied, the project
    is selected interactively at download time.
    """

    def __init__(self, project_id=None, project_name=None, get_all_projects=False):
        super(BaseSpace, self).__init__()
        # BaseSpace credentials (JSON) live in the user's home directory
        creds = self._get_credentials()
        self.client_key = creds['client_id']
        self.client_secret = creds['client_secret']
        self.access_token = creds['access_token']
        self.version = creds['version']
        self.api_server = creds['api_server']
        self.api = BaseSpaceAPI(self.client_key, self.client_secret, self.api_server,
                                self.version, AccessToken=self.access_token)
        self.params = qp(pars={'Limit': 1024, 'SortDir': 'Desc'})
        if project_id is not None:
            self.project_id = project_id
            self.project_name = None
        elif project_name is not None:
            self.project_name = project_name
            self.project_id = self._get_project_id_from_name(project_name)
        else:
            self.project_id = None
            self.project_name = None
        self._runs = None

    @property
    def runs(self):
        # lazily fetched and cached list of runs accessible to the user
        if self._runs is None:
            self._runs = self.api.getAccessibleRunsByUser(queryPars=self.params)
        return self._runs

    def _get_credentials(self):
        '''Read BaseSpace credentials from ~/.abstar/basespace_credentials (JSON).'''
        cred_file = os.path.expanduser('~/.abstar/basespace_credentials')
        # context manager ensures the credentials file is always closed
        # (the original leaked the open file handle)
        with open(cred_file, 'r') as cred_handle:
            return json.load(cred_handle)

    def _get_project_id_from_name(self, project_name=None):
        '''Return the ID of the project whose name matches ``project_name``.

        Args:
            project_name (str): name to look up. Falls back to
                ``self.project_name`` when not provided.

        Exits the program with an error message if no project matches.
        '''
        # BUGFIX: the original signature took no argument, but __init__
        # called this method with one -- a guaranteed TypeError at runtime.
        if project_name is None:
            project_name = self.project_name
        projects = self.api.getProjectByUser(queryPars=self.params)
        for project in projects:
            name = project.Name.encode('ascii', 'ignore')
            if sys.version_info[0] > 2:
                name = name.decode('utf-8')
            if name == project_name:
                return project.Id
        # report the name we were searching for, not the last project iterated
        print('No projects matched the given project name ({})'.format(project_name))
        sys.exit(1)

    def _user_selected_project_id(self):
        '''Interactively prompt the user to select a BaseSpace project.

        Projects are shown in pages of 25; any input that is not a valid
        integer index (e.g. 'next') advances to the next page.

        Returns:
            (str, str): the selected project's ID and name.
        '''
        projects = self.api.getProjectByUser(queryPars=self.params)
        self.print_basespace_project()
        offset = 0
        while True:
            page = projects[offset * 25:(offset * 25) + 25]
            for i, project in enumerate(page):
                project_name = project.Name.encode('ascii', 'ignore')
                if sys.version_info[0] > 2:
                    project_name = project_name.decode('utf-8')
                print('[ {} ] {}'.format(i + (offset * 25), project_name))
            print('')
            selection = raw_input("Select the project number (or 'next' to see more projects): ")
            # BUGFIX: the original's bare except fell through to duplicated
            # code that re-used the invalid (non-integer) index and crashed.
            try:
                project_index = int(selection)
            except ValueError:
                # non-numeric input: show the next page of projects
                offset += 1
                continue
            try:
                selected_id = projects[project_index].Id
                selected_name = projects[project_index].Name.encode('ascii', 'ignore')
            except IndexError:
                print('Invalid project number: {}'.format(project_index))
                continue
            if sys.version_info[0] > 2:
                selected_name = selected_name.decode('utf-8')
            return selected_id, selected_name

    def _get_projects(self, start=0):
        '''Print the first 25 accessible projects and return the full list.'''
        projects = self.api.getProjectByUser(queryPars=self.params)
        self.print_basespace_project()
        for i, project in enumerate(projects[:25]):
            project_name = project.Name.encode('ascii', 'ignore')
            if sys.version_info[0] > 2:
                project_name = project_name.decode('utf-8')
            print('[ {} ] {}'.format(i, project_name))
        print('')
        return projects

    def _get_samples(self, project_id):
        '''Return every sample in ``project_id``, paging 1024 samples at a time.'''
        samples = []
        offset = 0
        while True:
            query_params = qp(pars={'Limit': 1024, 'SortDir': 'Asc', 'Offset': offset * 1024})
            s = self.api.getSamplesByProject(project_id, queryPars=query_params)
            if not s:
                break
            samples.extend(s)
            offset += 1
        return samples

    def _get_files(self):
        '''Return every file belonging to every sample of the current project.'''
        files = []
        for sample in self._get_samples(self.project_id):
            files.extend(self.api.getFilesBySample(sample.Id, queryPars=self.params))
        return files

    def download(self, direc):
        '''Download all project files into ``direc``; returns the file count.

        If no project was chosen at construction time, the user is prompted
        to pick one interactively.
        '''
        if all([self.project_id is None, self.project_name is None]):
            self.project_id, self.project_name = self._user_selected_project_id()
        files = self._get_files()
        self.print_download_info(files)
        start = time.time()
        for i, f in enumerate(files):
            logger.info('[ {} ] {}'.format(i, str(f)))
            f.downloadFile(self.api, direc)
        end = time.time()
        self.print_completed_download_info(start, end)
        return len(files)

    def print_basespace_project(self):
        # interactive header shown before the project list
        print('')
        print('')
        print('========================================')
        print('BaseSpace Project Selection')
        print('========================================')
        print('')

    def print_download_info(self, files):
        logger.info('')
        logger.info('')
        logger.info('========================================')
        logger.info('Downloading files from BaseSpace')
        logger.info('========================================')
        logger.info('')
        logger.info('Identified {0} files for download.'.format(len(files)))
        logger.info('')

    def print_completed_download_info(self, start, end):
        logger.info('')
        logger.info('Download completed in {0} seconds'.format(end - start))
def parse_args():
    '''Build the CLI for the BaseSpace downloader and parse sys.argv.'''
    ap = argparse.ArgumentParser(
        "Downloads sequencing data from BaseSpace, Illumina's cloud storage platform.")
    ap.add_argument('-d', '--download-directory',
                    dest='download_directory',
                    required=True,
                    help="Directory into which BaseSpace data will be downloaded.")
    ap.add_argument('--project-id',
                    default=None,
                    help='ID of the project to be downloaded. Optional.')
    ap.add_argument('--project-name',
                    default=None,
                    help='Name of the project to be downloaded. Optional.')
    return ap.parse_args()
def download(download_directory, project_id=None, project_name=None):
    '''
    Download sequencing data from BaseSpace, Illumina's cloud storage platform.

    A BaseSpace credentials file must exist before using this function:

    1. Obtain a BaseSpace access token, most easily by creating a BaseSpace
       developer account following
       `these instructions <https://support.basespace.illumina.com/knowledgebase/articles/403618-python-run-downloader>`_

    2. Build the credentials file from those developer credentials::

        $ make_basespace_credfile

       and follow the prompts.

    Examples:

        Download by project name::

            from abstar.utils import basespace
            basespace.download('/path/to/download_directory', project_name='MyProject')

        Download by project ID::

            basespace.download('/path/to/download_directory', project_id='ABC123')

        With neither ``project_id`` nor ``project_name``, an interactive list
        of your BaseSpace projects is shown for selection::

            basespace.download('/path/to/download_directory')

    Args:
        download_directory (str): Destination for the raw sequence files;
            created if it does not already exist.
        project_id (str): ID of the project to be downloaded.
        project_name (str): Name of the project to be downloaded.

    Returns:
        int: The number of sequence files downloaded.
    '''
    make_dir(download_directory)
    basespace = BaseSpace(project_id, project_name)
    return basespace.download(download_directory)
def copy_from_basemount(basemount_directory, destination_directory):
    '''Copy every ``.fastq.gz`` found under a BaseMount directory tree into
    ``destination_directory`` (created if missing), with a progress bar.'''
    make_dir(os.path.abspath(destination_directory))
    fastqs = [os.path.join(root, name)
              for root, _, filenames in os.walk(basemount_directory)
              for name in filenames
              if name.endswith('.fastq.gz')]
    # banner + summary, emitted through the module logger
    for msg in ['', '',
                '========================================',
                'Copying files from BaseMount',
                '========================================',
                '',
                'Found {0} FASTQ files.'.format(len(fastqs)),
                '',
                'Copying:']:
        logger.info(msg)
    start = datetime.now()
    progress_bar(0, len(fastqs), start_time=start)
    for i, fastq in enumerate(fastqs):
        destination = os.path.join(destination_directory, os.path.basename(fastq))
        copyfile(fastq, destination)
        progress_bar(i + 1, len(fastqs), start_time=start)
    print('\n')
if __name__ == '__main__':
args = parse_args()
download(args.download_directory,
project_id=args.project_id,
project_name=args.project_name)<|fim▁end|>
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
<|file_name|>snpTableMaker.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import random
__author__ = 'duceppemo'
class SnpTableMaker(object):
    """
    Build a SNP table and aligned per-sample fasta files from a folder of VCFs.

    Everything is ran inside the class because data structures have to be
    shared across parent and child process during multi threading
    """
def __init__(self, args):
    """Validate the parsed CLI arguments, set up state, and run the pipeline.

    NOTE(review): this module is Python 2 only (uses ``long`` and
    print statements). The whole pipeline runs from the constructor.
    """
    import os
    import sys
    import glob
    import multiprocessing
    # Define variables based on supplied arguments
    self.args = args
    self.ref = args.ref  # reference genome FASTA
    if not os.path.isfile(self.ref):
        sys.exit('Supplied reference genome file does not exists.')
    self.vcf = args.vcf  # folder containing the input VCF files
    if not os.path.isdir(self.vcf):
        sys.exit('Supplied VCF folder does not exists.')
    self.minQUAL = args.minQUAL  # minimum QUAL for a VCF record to count
    # 'long' exists only in Python 2
    if not isinstance(self.minQUAL, (int, long)):
        sys.exit('minQual value must be an integer')
    self.ac1_report = args.ac1      # report of AC=1 positions also seen as AC=2
    self.section4 = args.section4   # SNP-count summary file (opened in append mode)
    self.output = args.output       # folder for the per-sample fasta files
    if not os.path.isdir(self.output):
        os.makedirs(self.output)
    self.table = args.table         # output SNP table (TSV)
    # number of threads to use = number of cpu
    self.cpus = int(multiprocessing.cpu_count())
    # create dictionaries to hold data
    self.refgenome = dict()        # chrom -> SeqRecord of the reference
    self.vcfs = dict()             # sample -> chrom -> pos -> [ALT alleles]
    self.ac1s = dict()             # sample -> chrom -> [positions called AC=1]
    self.ac2s = dict()             # sample -> chrom -> [positions called AC=2]
    self.allac2 = dict()           # chrom -> [positions AC=2 in ANY sample]
    self.finalac1 = dict()         # AC=1 positions that are AC=2 in another sample
    self.fastas = dict()           # sample -> chrom -> pos -> [allele]
    self.counts = dict()           # chrom -> pos -> distinct alleles observed
    self.informative_pos = dict()  # subset of fastas with >1 distinct allele
    # create a list of vcf files in vcfFolder
    self.vcfList = list()
    for filename in glob.glob(os.path.join(self.vcf, '*.vcf')):
        self.vcfList.append(filename)
    # run the script
    self.snp_table_maker()
def snp_table_maker(self):
    """Run each pipeline stage in order; stages communicate via self state."""
    self.parse_ref()
    self.parse_vcf()
    self.find_ac1_in_ac2()
    self.write_ac1_report()
    self.get_allele_values()
    self.get_informative_snps()
    self.count_snps()
    self.write_fasta()
    self.write_root()
    self.write_snp_table()
def parse_ref(self):
    """Load the reference FASTA into self.refgenome, keyed by chromosome id."""
    from Bio import SeqIO
    print ' Parsing reference genome'
    fh = open(self.ref, "rU")
    self.refgenome = SeqIO.to_dict(SeqIO.parse(fh, "fasta"))
    fh.close()
def parse_vcf(self):
import sys
print ' Parsing VCF files'
for samplefile in self.vcfList:
sample = os.path.basename(samplefile).split('.')[0] # get what's before the first dot
self.vcfs[sample] = dict()
with open(samplefile, 'r') as f: # open file
for line in f: # read file line by line
line = line.rstrip() # chomp -> remove trailing whitespace characters<|fim▁hole|> elif line.startswith('#CHROM'):
sample_name = line.split("\t")[9]
if sample_name != sample:
sys.exit('File name and sample name inside VCF file are different: %s'
% samplefile)
else:
# chrom, pos, alt, qual = [line.split()[i] for i in (0, 1, 4, 5)]
chrom = line.split()[0]
pos = int(line.split()[1])
alt = line.split()[4]
qual = line.split()[5] # string -> needs to be converted to integer
if qual != '.':
try:
qual = float(qual)
except ValueError:
qual = int(qual)
else:
continue # skip line
ac = line.split()[7].split(';')[0]
# http://www.saltycrane.com/blog/2010/02/python-setdefault-example/
self.vcfs.setdefault(sample, {}).setdefault(chrom, {}).setdefault(pos, [])\
.append(alt)
if ac == 'AC=1' and qual > self.args.minQUAL:
self.ac1s.setdefault(sample, {}).setdefault(chrom, []).append(pos)
elif ac == 'AC=2' and qual > self.args.minQUAL:
self.ac2s.setdefault(sample, {}).setdefault(chrom, []).append(pos)
# This is equivalent, but faster?
try:
if pos not in self.allac2[chrom]: # only add is not already present
self.allac2.setdefault(chrom, []).append(pos)
except KeyError: # chromosome does not exist in dictionary
self.allac2.setdefault(chrom, []).append(pos)
# This works
# if chrom in self.allac2:
# if pos in self.allac2[chrom]:
# pass
# else:
# self.allac2.setdefault(chrom, []).append(pos)
# else:
# self.allac2.setdefault(chrom, [])
def find_ac1_in_ac2(self):
    """Record AC=1 positions that are also called AC=2 in at least one sample.

    Results go to self.finalac1 (sample -> chrom -> [positions]).
    """
    print ' Finding AC=1/AC=2 positions'
    if isinstance(self.ac1s, dict):  # check if it's a dict before using .iteritems()
        for sample, chromosomes in self.ac1s.iteritems():
            if isinstance(chromosomes, dict):  # check for dict
                for chrom, positions in chromosomes.iteritems():
                    if isinstance(positions, list):  # check for list
                        for pos in positions:
                            if pos in self.allac2[chrom]:  # check ac1 in ac2
                                self.finalac1.setdefault(sample, {}).setdefault(chrom, []).append(pos)
def write_ac1_report(self):
    """Write the AC=1-also-in-AC=2 report, one paragraph per sample/chromosome."""
    print " Writing AC=1/AC=2 report to file"
    # free up resources not needed anymore
    self.ac1s.clear()
    fh = open(self.ac1_report, 'w')
    if isinstance(self.finalac1, dict):
        for sample, chromosomes in sorted(self.finalac1.iteritems()):
            if isinstance(chromosomes, dict):
                for chrom, positions in sorted(chromosomes.iteritems()):
                    if isinstance(positions, list):
                        fh.write("{}\nAC=1 is also found in AC=2 in chromosome {}".format(sample, chrom) +
                                 " at position(s): " + ', '.join(map(str, positions)) + "\n\n")
    fh.close()
def get_allele_values(self):
    """For every position that is AC=2 in any sample, record each sample's allele.

    Per sample the allele is: its ALT when the position is AC=2 for that
    sample; its ALT when the position is AC=1 for the sample but AC=2
    elsewhere; otherwise the reference base. Also tracks the set of
    distinct alleles seen at each position (self.counts).
    """
    print ' Getting allele values'
    for sample in self.ac2s:
        for chrom in self.ac2s[sample]:
            for pos in self.allac2[chrom]:
                # if in AC=2 for that sample
                if pos in self.ac2s[sample][chrom]:
                    allele = ''.join(self.vcfs[sample][chrom][pos])  # convert list to string
                else:
                    try:  # use a try here because some samples are not in finalac1
                        # if in AC=1 for that sample, but also in AC=2 in other sample
                        if pos in self.finalac1[sample][chrom]:
                            allele = ''.join(self.vcfs[sample][chrom][pos])  # convert list to string
                        else:
                            allele = self.refgenome[chrom].seq[pos - 1]
                    except KeyError:
                        allele = self.refgenome[chrom].seq[pos - 1]
                self.fastas.setdefault(sample, {}).setdefault(chrom, {}).setdefault(pos, []).append(allele)
                # Track all alleles for each position
                try:
                    if allele not in self.counts[chrom][pos]:
                        self.counts.setdefault(chrom, {}).setdefault(pos, []).append(allele)
                except KeyError:
                    self.counts.setdefault(chrom, {}).setdefault(pos, []).append(allele)
def get_informative_snps(self):
    """SNPs position that have at least one different ALT allele within all the samples"""
    print ' Getting informative SNPs'
    # free up resources not needed anymore
    self.ac2s.clear()
    self.allac2.clear()
    self.finalac1.clear()
    # need to get the positions in the same order for all the sample (sort chrom and pos)
    for sample in self.fastas:
        for chrom in sorted(self.fastas[sample]):
            for pos in sorted(self.fastas[sample][chrom]):
                if len(self.counts[chrom][pos]) > 1:  # if more that one ALT allele, keep it
                    allele = ''.join(self.fastas[sample][chrom][pos])  # convert list to string
                    # check if allele is empty
                    if allele:
                        self.informative_pos.setdefault(sample, {}).setdefault(chrom, {})\
                            .setdefault(pos, []).append(''.join(allele))
                    else:
                        print "No allele infor for {}, {}:{}".format(sample, chrom, pos)
def count_snps(self):
    """Report filtered vs informative SNP totals to stdout and the section4 file."""
    print ' Counting SNPs'
    # free up resources not needed anymore
    self.counts.clear()
    # All samples should have the same number of informative SNPs
    # so any can be used to get the stats
    randomsample = random.choice(self.informative_pos.keys())
    filteredcount = 0
    informativecount = 0
    # Account for multiple chromosome
    for chrom in self.fastas[randomsample]:
        filteredcount += len(self.fastas[randomsample][chrom])  # number of positions
        informativecount += len(self.informative_pos[randomsample][chrom])
    # print to screen
    print "\nTotal filtered SNPs: {}".format(filteredcount)
    print "Total informative SNPs: {}\n".format(informativecount)
    # write to file
    fh = open(self.section4, "a")  # append mode
    fh.write("Total filtered SNPs: {}\n".format(filteredcount))
    fh.write("Total informative SNPs: {}\n\n".format(informativecount))
    fh.close()
def write_fasta(self):
print ' Writing sample fasta files'
# free up resources not needed anymore
self.fastas.clear()
# Create output folder for fasta files
if not os.path.exists(self.output):
os.makedirs(self.output)
if isinstance(self.informative_pos, dict):
for sample, chromosomes in sorted(self.informative_pos.iteritems()):
samplepath = os.path.join(self.output, sample + '.fas')
fh = open(samplepath, 'w')
fh.write(">{}\n".format(sample))
if isinstance(chromosomes, dict):
for chrom, positions in sorted(chromosomes.iteritems()):
if isinstance(positions, dict):
for pos, allele in sorted(positions.iteritems()):
if isinstance(allele, list):
fh.write(''.join(allele)) # convert list to text
fh.write("\n")
def write_root(self):
print ' Writing root fasta file'
rootpath = os.path.join(self.output, 'root.fas')
randomsample = random.choice(self.informative_pos.keys())
rootseq = list()
fh = open(rootpath, 'w')
if isinstance(self.informative_pos, dict):
for chrom in self.informative_pos[randomsample]:
for pos in sorted(self.informative_pos[randomsample][chrom]):
rootseq.append(self.refgenome[chrom].seq[pos - 1])
fh.write(">root\n" + "{}\n".format(''.join(rootseq)))
def write_snp_table(self):
print ' Writing SNP table'
fh = open(self.table, 'w')
randomsample = random.choice(self.informative_pos.keys())
ref_pos = list()
ref_call = list()
# reference
if isinstance(self.informative_pos, dict):
for chrom in self.informative_pos[randomsample]:
for pos in sorted(self.informative_pos[randomsample][chrom]):
ref_pos.append(''.join(chrom) + '-' + str(pos))
ref_call.append(self.refgenome[chrom].seq[pos - 1])
fh.write("reference_pos\t{}\n".format("\t".join(ref_pos)))
fh.write("reference_call\t{}\n".format("\t".join(ref_call)))
# sample
if isinstance(self.informative_pos, dict):
for sample, chromosomes in self.informative_pos.iteritems():
fh.write("{}".format(sample))
if isinstance(chromosomes, dict):
for chrom, positions in sorted(chromosomes.iteritems()):
if isinstance(positions, dict):
for pos, allele in sorted(positions.iteritems()):
if isinstance(allele, list):
allele = ''.join(allele) # convert list to text
fh.write("\t{}".format(allele))
fh.write("\n")
fh.close()
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser(description='Generate SNP table and aligned fasta files from VCF files')
parser.add_argument('-r', '--ref', metavar='ref.fasta',
required=True,
help='reference genome used in the VCF files')
parser.add_argument('-v', '--vcf', metavar='vcfFolder',
required=True,
help='location of the VCF files')
parser.add_argument('-q', '--minQUAL', metavar='minQUAL', type=int,
required=True,
help='minimum QUAL value in VCF file')
parser.add_argument('-ac1', '--ac1', metavar='AC1Report.txt',
required=True,
help='output file where positions having both AC=1 and AC=2 are reported')
parser.add_argument('-s4', '--section4', metavar='section4.txt',
required=True,
help='output file where total filtered SNP positions and total informative SNPs are reported')
parser.add_argument('-o', '--output', metavar='fastaOutFolder',
required=True,
help='folder where the output fasta files will be output')
parser.add_argument('-t', '--table', metavar='fastaTable.tsv',
required=True,
help='the SNP table')
# Get the arguments into an object
arguments = parser.parse_args()
SnpTableMaker(arguments)<|fim▁end|>
|
if line: # skip blank lines or lines with only whitespaces
if line.startswith('##'): # skip comment lines
continue
|
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># Copyright 2017 ForgeFlow S.L.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Purchase Order Approved",
"summary": "Add a new state 'Approved' in purchase orders.",
"version": "14.0.1.1.0",
"category": "Purchases",<|fim▁hole|> "application": False,
"installable": True,
"depends": ["purchase_stock"],
"data": ["views/purchase_order_view.xml", "views/res_config_view.xml"],
}<|fim▁end|>
|
"website": "https://github.com/OCA/purchase-workflow",
"author": "ForgeFlow, Odoo Community Association (OCA)",
"license": "AGPL-3",
|
<|file_name|>genderController.js<|end_file_name|><|fim▁begin|>import { exec, getById } from "../database/database";
import Gender from "../entities/gender";
export default class GenderController {
constructor() {}
static getById(id, as_object = true) {
let gender = null;
let results = getById(id,
`<|fim▁hole|> `);
if(results) {
gender = (as_object) ? new Gender(results) : results;
}
console.log(results);
return gender;
}
}<|fim▁end|>
|
SELECT
t1.id,
t1.identifier as name
FROM genders as t1
|
<|file_name|>event.py<|end_file_name|><|fim▁begin|>"""
sentry.models.event
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import logging
from django.db import models
from django.utils import timezone
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from sentry.constants import LOG_LEVELS, MAX_CULPRIT_LENGTH
from sentry.db.models import (
Model, NodeField, BoundedIntegerField, BoundedPositiveIntegerField,
BaseManager, sane_repr
)
from sentry.utils.cache import memoize
from sentry.utils.imports import import_string
from sentry.utils.safe import safe_execute
from sentry.utils.strings import truncatechars, strip
class Event(Model):
    """
    An individual event.
    """
    # group of similar events this event was aggregated into
    group = models.ForeignKey('sentry.Group', blank=True, null=True, related_name="event_set")
    # client-supplied identifier; stored in the legacy 'message_id' column
    event_id = models.CharField(max_length=32, null=True, db_column="message_id")
    project = models.ForeignKey('sentry.Project', null=True)
    logger = models.CharField(
        max_length=64, blank=True, default='root', db_index=True)
    # severity (choices from LOG_LEVELS); defaults to ERROR
    level = BoundedPositiveIntegerField(
        choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True,
        db_index=True)
    message = models.TextField()
    # stored in the legacy 'view' column
    culprit = models.CharField(
        max_length=MAX_CULPRIT_LENGTH, blank=True, null=True,
        db_column='view')
    # hash used to group similar events together
    checksum = models.CharField(max_length=32, db_index=True)
    num_comments = BoundedPositiveIntegerField(default=0, null=True)
    platform = models.CharField(max_length=64, null=True)
    datetime = models.DateTimeField(default=timezone.now, db_index=True)
    # duration reported by the client, if any
    time_spent = BoundedIntegerField(null=True)
    server_name = models.CharField(max_length=128, db_index=True, null=True)
    site = models.CharField(max_length=128, db_index=True, null=True)
    # arbitrary structured payload persisted via the node store
    data = NodeField(blank=True, null=True)
    objects = BaseManager()

    class Meta:
        # the model predates its rename: it still lives in 'sentry_message'
        app_label = 'sentry'
        db_table = 'sentry_message'
        verbose_name = _('message')
        verbose_name_plural = _('messages')
        unique_together = ('project', 'event_id')

    __repr__ = sane_repr('project_id', 'group_id', 'checksum')
def error(self):
    """Return a short single-line label for this event's message."""
    label = strip(self.message)
    if not label:
        return '<unlabeled message>'
    # first line only, capped at 100 characters
    return truncatechars(label.splitlines()[0], 100)
error.short_description = _('error')
def has_two_part_message(self):
    """True when the message spans multiple lines or exceeds 100 characters."""
    stripped = strip(self.message)
    return len(stripped) > 100 or '\n' in stripped
def message_top(self):
    """Return the culprit when present, otherwise the short error label."""
    stripped = strip(self.culprit)
    return stripped if stripped else self.error()
@property
def team(self):
    # convenience accessor; the owning team is defined on the project
    return self.project.team
@memoize
def ip_address(self):
    """Best-effort client IP: HTTP env REMOTE_ADDR first, then the User interface."""
    http = self.data.get('sentry.interfaces.Http')
    if http and 'env' in http:
        addr = http['env'].get('REMOTE_ADDR')
        if addr:
            return addr
    user = self.data.get('sentry.interfaces.User')
    if user:
        addr = user.get('ip_address')
        if addr:
            return addr
    return None
@memoize
def user_ident(self):
    """
    The identifier from a user is considered from several interfaces.

    In order:

    - User.id
    - User.email
    - User.username
    - Http.env.REMOTE_ADDR

    """
    user_data = self.data.get('sentry.interfaces.User') or {}
    # first non-empty field wins, in the documented priority order
    for prefix in ('id', 'email', 'username'):
        value = user_data.get(prefix)
        if value:
            return '%s:%s' % (prefix, value)
    if self.ip_address:
        return 'ip:%s' % (self.ip_address,)
    return None
@memoize
def interfaces(self):
    """Instantiate this event's interface objects from the data payload.

    Keys containing a dot are treated as dotted import paths to interface
    classes; unknown or failing interfaces are skipped silently. The result
    is ordered by each interface's score, highest first.
    """
    result = []
    for key, data in self.data.iteritems():
        if '.' not in key:
            continue
        try:
            cls = import_string(key)
        except ImportError:
            continue  # suppress invalid interfaces
        value = safe_execute(cls, **data)
        if not value:
            continue
        result.append((key, value))
    return SortedDict((k, v) for k, v in sorted(result, key=lambda x: x[1].get_score(), reverse=True))
def get_version(self):
if not self.data:
return
if '__sentry__' not in self.data:<|fim▁hole|> return
if 'version' not in self.data['__sentry__']:
return
module = self.data['__sentry__'].get('module', 'ver')
return module, self.data['__sentry__']['version']
def get_tags(self):
    """Return the event's (tag, value) pairs, excluding internal 'sentry:' tags."""
    try:
        pairs = self.data.get('tags') or ()
        return [(tag, value) for tag, value in pairs
                if not tag.startswith('sentry:')]
    except ValueError:
        # at one point Sentry allowed invalid tag sets such as (foo, bar)
        # vs ((tag, foo), (tag, bar))
        return []
def as_dict(self):
    """Serialize the event's core fields plus its data payload."""
    # We use a SortedDict to keep elements ordered for a potential JSON serializer
    data = SortedDict()
    data['id'] = self.event_id
    data['checksum'] = self.checksum
    data['project'] = self.project.slug
    data['logger'] = self.logger
    data['level'] = self.get_level_display()
    data['culprit'] = self.culprit
    data['datetime'] = self.datetime
    data['time_spent'] = self.time_spent
    # payload keys are appended after the core fields, in sorted order
    for k, v in sorted(self.data.iteritems()):
        data[k] = v
    return data
@property
def size(self):
return len(unicode(vars(self)))<|fim▁end|>
| |
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import os
import logging
from urllib.parse import urlencode, unquote
from flask import request, current_app
from flask_api import FlaskAPI
from flask_api.exceptions import APIException, NotFound
from . import services
from . import stores
from . import routes
log = logging.getLogger('api')
class TemplateNotFound(NotFound):
    # 404 raised when a requested template does not exist
    detail = "Template not found."


class InvalidMaskedCode(NotFound):
    # 404 raised when a masked URL cannot be resolved to an image
    detail = "Masked URL does not match any image."


class FilenameTooLong(APIException):
    # 414 URI Too Long, for over-length generated filenames
    status_code = 414
    detail = "Filename too long."
def create_app(config):
    """Build and fully wire up the Flask API application from a config object."""
    application = FlaskAPI(__name__)
    application.config.from_object(config)
    configure_logging(application)
    register_services(application)
    register_blueprints(application)
    return application
def configure_logging(app):
    """Set global log verbosity from app.config and quiet noisy third-party loggers."""
    level = logging.DEBUG if app.config['DEBUG'] else logging.INFO
    logging.basicConfig(level=level, format="%(levelname)s: %(message)s")
    # these libraries are too chatty below WARNING
    for noisy in ('yorm', 'requests'):
        logging.getLogger(noisy).setLevel(logging.WARNING)
def register_services(app):
    """Construct the service layer (links, templates, images) and attach it to the app."""
    # domain exceptions the services raise, mapped to API error responses
    exceptions = services.Exceptions(
        TemplateNotFound=TemplateNotFound,
        InvalidMaskedCode=InvalidMaskedCode,
        FilenameTooLong=FilenameTooLong,
    )
    # both stores are rooted under the configured data directory
    templates_root = os.path.join(app.config['ROOT'], 'data', 'templates')
    template_store = stores.template.TemplateStore(templates_root)
    images_root = os.path.join(app.config['ROOT'], 'data', 'images')
    image_store = stores.image.ImageStore(images_root)
    app.link_service = services.link.LinkService(
        exceptions=exceptions,
        template_store=template_store,
    )
    app.template_service = services.template.TemplateService(
        exceptions=exceptions,
        template_store=template_store,
    )
    app.image_service = services.image.ImageService(
        exceptions=exceptions,
        template_store=template_store,
        image_store=image_store,
        debug=app.config['DEBUG']
    )

    def log_request(response):
        # in debug mode, log every request with its query string and status code
        if current_app.debug:
            path = request.path
            if request.args:
                path += "?%s" % unquote(urlencode(request.args))
            log.info("%s: %s - %i", request.method, path,
                     response.status_code)
        return response

    app.after_request(log_request)
def register_blueprints(app):
app.register_blueprint(routes.static.blueprint)
app.register_blueprint(routes.root.blueprint)
app.register_blueprint(routes.templates.blueprint)<|fim▁hole|> app.register_blueprint(routes.overview.blueprint)
app.register_blueprint(routes.generator.blueprint)
app.register_blueprint(routes.latest.blueprint)
app.register_blueprint(routes.aliases.blueprint)<|fim▁end|>
|
app.register_blueprint(routes.links.blueprint)
app.register_blueprint(routes.image.blueprint)
|
<|file_name|>quick-start.go<|end_file_name|><|fim▁begin|>package assets
var Init_doc_quick_start = `# 0.1 - Quick Start
This is a set of short examples with minmal explanation. It is meant as
a "quick start". Soon, we'll write a longer tour :-)
Add a file to ipfs:
echo "hello world" >hello
ipfs add hello
View it:
ipfs cat <the-hash-you-got-here>
Try a directory:
mkdir foo
mkdir foo/bar
echo "baz" > foo/baz
echo "baz" > foo/bar/baz
ipfs add -r foo
View things:
ipfs ls <the-hash-here>
ipfs ls <the-hash-here>/bar
ipfs cat <the-hash-here>/baz
ipfs cat <the-hash-here>/bar/baz
ipfs cat <the-hash-here>/bar
ipfs ls <the-hash-here>/baz
References:
ipfs refs <the-hash-here>
ipfs refs -r <the-hash-here>
ipfs refs --help
<|fim▁hole|>
Get:
ipfs get <the-hash-here> foo2
diff foo foo2
Objects:
ipfs object get <the-hash-here>
ipfs object get <the-hash-here>/foo2
ipfs object --help
Pin + GC:
ipfs pin -r <the-hash-here>
ipfs gc
ipfs ls <the-hash-here>
ipfs unpin -r <the-hash-here>
ipfs gc
Daemon:
ipfs daemon (in another terminal)
ipfs id
Network:
(must be online)
ipfs swarm peers
ipfs id
ipfs cat <hash-of-remote-object>
Mount:
(warning: fuse is finicky!)
ipfs mount
cd /ipfs/<
Tool:
ipfs version
ipfs update
ipfs commands
ipfs config --help
open http://localhost:5001/webui
Browse:
webui:
http://localhost:5001/webui
video:
http://localhost:8080/ipfs/QmVc6zuAneKJzicnJpfrqCH9gSy6bz54JhcypfJYhGUFQu/play#/ipfs/QmTKZgRNwDNZwHtJSjCp6r5FYefzpULfy37JvMt9DwvXse
images:
http://localhost:8080/ipfs/QmZpc3HvfjEXvLWGQPWbHk3AjD5j8NEN4gmFN8Jmrd5g83/cs
markdown renderer app:
http://localhost:8080/ipfs/QmX7M9CiYXjVeFnkfVGf3y5ixTZ2ACeSGyL1vBJY1HvQPp/mdown
`<|fim▁end|>
| |
<|file_name|>suggestions.js<|end_file_name|><|fim▁begin|>var clientElasticsearch = require("../../../Elasticsearch/ElasticsearchClient");
var ElasticsearchParser = require("../../../Elasticsearch/ElasticsearchParser");
var Q = require('q');
/**
 * Fetch every Zabbix host that has no mapping yet and shape each hit into a
 * suggestion object ({response_id, response_label, target}).
 * Returns a promise resolving to the array of suggestions.
 */
var getSuggestions = function () {
    var searchQuery = {
        "index": "source",
        "type": "zabbix_host",
        "body": {
            "query": {
                "term": { "haveMapping": false }
            },
            "fields": ["id", "hostname"]
        },
        "from": 0,
        "size": 999999999,
        "scroll": "1m"
    };
    return clientElasticsearch.search(searchQuery)
        .then(function (body) {
            // Turn the raw search body into a list of host records
            return ElasticsearchParser.loadFromBodyFields(body);
        })
        .then(function (hosts) {
            return hosts.map(function (host) {
                return {
                    "response_id": host.id,
                    "response_label": host.hostname,
                    "target": [
                        {
                            "label": host.hostname
                        }
                    ]
                };
            });
        });
}
<|fim▁hole|>module.exports = getSuggestions;<|fim▁end|>
| |
<|file_name|>CamBlur.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# create the pipeline: ball and spikes
sphere = vtk.vtkSphereSource()
sphere.SetThetaResolution(7)
sphere.SetPhiResolution(7)
sphereMapper = vtk.vtkPolyDataMapper()
sphereMapper.SetInputConnection(sphere.GetOutputPort())
sphereActor = vtk.vtkActor()
sphereActor.SetMapper(sphereMapper)
sphereActor2 = vtk.vtkActor()
sphereActor2.SetMapper(sphereMapper)
# cones glyphed onto the sphere's points (oriented by normals) are the spikes
cone = vtk.vtkConeSource()
cone.SetResolution(5)
glyph = vtk.vtkGlyph3D()
glyph.SetInputConnection(sphere.GetOutputPort())
glyph.SetSourceConnection(cone.GetOutputPort())
glyph.SetVectorModeToUseNormal()
glyph.SetScaleModeToScaleByVector()
glyph.SetScaleFactor(0.25)
spikeMapper = vtk.vtkPolyDataMapper()
spikeMapper.SetInputConnection(glyph.GetOutputPort())
spikeActor = vtk.vtkActor()
spikeActor.SetMapper(spikeMapper)<|fim▁hole|>spikeActor2.SetMapper(spikeMapper)
# set the actors position and scale
spikeActor.SetPosition(0,0.7,0)
sphereActor.SetPosition(0,0.7,0)
spikeActor2.SetPosition(0,-1,-10)
sphereActor2.SetPosition(0,-1,-10)
spikeActor2.SetScale(1.5,1.5,1.5)
sphereActor2.SetScale(1.5,1.5,1.5)
ren1.AddActor(sphereActor)
ren1.AddActor(spikeActor)
ren1.AddActor(sphereActor2)
ren1.AddActor(spikeActor2)
ren1.SetBackground(0.1,0.2,0.4)
renWin.SetSize(200,200)
# do the first render and then zoom in a little
renWin.Render()
ren1.GetActiveCamera().SetFocalPoint(0,0,0)
ren1.GetActiveCamera().Zoom(1.8)
# a small focal disk plus several focal-depth frames produces the camera blur
ren1.GetActiveCamera().SetFocalDisk(0.05)
renWin.SetFDFrames(11)
renWin.Render()
iren.Initialize()
#renWin SetFileName CamBlur.tcl.ppm
#renWin SaveImageAsPPM
# prevent the tk window from showing up then start the event loop
# --- end of script --<|fim▁end|>
|
spikeActor2 = vtk.vtkActor()
|
<|file_name|>A10BaseClass.py<|end_file_name|><|fim▁begin|>__author__ = 'mthompson'
import copy
import inspect
import importlib
import urllib
import json
from urlparse import urlparse
import collections
import re
import os
import keyword
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class A10BaseClass(object):
is_POST = True
    def __init__(self):
        # Last error payload/string from the device; empty string means no error.
        self.ERROR_MSG = ""
    @classmethod
    def __json__(self, class_object, **kwargs):
        """Serialize *class_object* into the AXAPI JSON payload shape.

        Temporarily detaches bookkeeping attributes (``required``,
        ``DeviceProxy``, DEBUG-*) so they do not leak into the wire payload,
        then restores them.  Returns ``''`` for an empty payload, the bare
        dict when called with ``sub=0`` (nested objects), otherwise
        ``{class_object.b_key: payload}``.
        """
        # Detach metadata; bare excepts preserve the original best-effort
        # behavior when an attribute is absent.
        try:
            temp_require = copy.deepcopy(class_object.required)
            del class_object.required
        except:
            pass
        try:
            dp = copy.deepcopy(class_object.DeviceProxy)
            delattr(class_object, "DeviceProxy")
        except:
            pass
        json_object = self.Obj_to_json(class_object.__dict__)
        if 'a10-url' in json_object:
            del json_object['a10-url']
        if "b-key" in json_object:
            del json_object['b-key']
        # Debug fields may sit at the top level or nested under the body key.
        debug_keys = ["DEBUG-CONNECTION", "DEBUG-Payload", "DEBUG-Response", "DEBUG-URL", "DEBUG-headers"]
        for d_keys in debug_keys:
            try:
                del json_object[d_keys]
            except:
                try:
                    del json_object[class_object.b_key][d_keys]
                except:
                    pass
        # Restore what was detached above.
        try:
            class_object.__setattr__("DeviceProxy", dp)
        except:
            pass
        #(Raunak):json_object is None or empty dictionary return None
        try:
            class_object.__setattr__("required", temp_require)
        except:
            pass
        if not json_object:
            return ''
        elif "sub" in kwargs and kwargs['sub'] == 0:
            return json_object
        else:
            r_object = {}
            r_object[class_object.b_key] = json_object
            return r_object
    @classmethod
    def Obj_to_json(self, obj):
        """Convert an attribute dict into a JSON-ready dict.

        Attribute names are re-hyphenated and stripped of the A10WW keyword
        prefix; nested SDK objects are serialized recursively, non-empty
        strings/dicts/lists are copied through, and booleans become "1"/"0".
        """
        new_obj = {}
        for k, v in obj.items():
            # Python attribute name -> AXAPI field name.
            key = k.replace("_", "-").replace("A10WW_", "").replace(
                "A10WW-", "")
            if "class" in str(type(v)):
                json_obj = self.__json__(v)
                #(Raunak):If the response exists update the new_obj dictionary
                #with the json data
                if json_obj:
                    new_obj.update(json_obj)
            elif (isinstance(v, dict) or
                    isinstance(v, unicode) or
                    isinstance(v, str) or
                    isinstance(v, list)) and len(v) != 0:
                if isinstance(v, list):
                    temp_list = []
                    if len(v) != 0:
                        for i in v:
                            temp_list.append(i.__json__(i, sub=0))
                    new_obj[key] = temp_list
                elif isinstance(v, dict) and len(v) != 0:
                    new_obj[key] = self.Obj_to_json(v)
                elif v is True:
                    new_obj[key] = "1"
                elif v is False:
                    new_obj[key] = "0"
                elif v is not None:
                    if len(v) != 0:
                        new_obj[key] = v
            elif isinstance(v, int):
                new_obj[key] = v
            # If it's an attribute and it's value is None, use None
            # (grao): adding null value attributes only when POST (avoiding for PUT)
            elif v is None and A10BaseClass.is_POST:
                new_obj[key] = v
        return new_obj
'''
Converts from Unicode
'''
    def convert(self, data):
        """Recursively convert unicode strings inside *data* to byte strings
        (Python 2 only), preserving the container types."""
        if isinstance(data, basestring):
            return str(data)
        elif isinstance(data, collections.Mapping):
            return dict(map(self.convert, data.iteritems()))
        elif isinstance(data, collections.Iterable):
            return type(data)(map(self.convert, data))
        else:
            return data
"""
GET THE PARENT KEY ASS NEEDED
"""
def find_key(self, d, key):
for k, v in d.items():
if isinstance(v, dict):
p = self.find_key(v, key)
if p:
return [k] + p
elif v == key:
return [k]
def _format_key_name_for_module_lookup(self, key, lambda_expression):
if key.startswith('A10WW_'):
return lambda_expression(key)
else:
return lambda_expression(key).title()
    def _search_for_child_object_module(self, caller, sub_class):
        """Import the module expected to hold a child object's class.

        Tries ``<caller>_<sub_class>`` (preserving an ``_oper``/``_stats``
        suffix placement); on any failure falls back to re-importing the
        caller's own module.
        """
        try:
            if caller.__name__.endswith('_oper'):
                module_name = caller.__name__.replace('_oper', '')
                caller_name = importlib.import_module(module_name + "_" + sub_class.lower() + "_oper")
            elif caller.__name__.endswith('_stats'):
                module_name = caller.__name__.replace('_stats', '')
                caller_name = importlib.import_module(module_name + "_" + sub_class.lower() + "_stats")
            else:
                caller_name = importlib.import_module(caller.__name__ + "_" + sub_class.lower())
        except:
            caller_name = importlib.import_module(caller.__name__)
        return caller_name
    def _search_for_child_class_inside_module(self, child_node_name, caller_name, DeviceProxy):
        """Resolve the class for *child_node_name* inside module *caller_name*.

        Tries a cascade of naming conventions derived from the hyphenated
        AXAPI node name (e.g. "tagged-ethernet-list" -> TaggedEthernet,
        "class-list" -> ClassList) and instantiates the first match.

        NOTE(review): if every lookup fails, ``obj_class`` is never bound and
        the final ``return`` raises NameError -- confirm callers rely on that.
        """
        k = child_node_name
        sub_class = ''.join(
            x for x in k.replace("-list", "", 1).replace("-", "_").title() if
            not x.isspace())
        try:
            obj_class = getattr(caller_name, sub_class)(DeviceProxy=DeviceProxy)
        except:
            try:
                sub_class = ''.join(
                    x for x in k.replace("-", " ").replace("-list", "", 1).title()
                    if not x.isspace())
                obj_class = getattr(caller_name, sub_class)(DeviceProxy=DeviceProxy)
            except:
                try:
                    sub_class = ''.join(x for x in
                                        k.replace("-", "").replace("list", "").replace("list",
                                                                                       "").title()
                                        if not x.isspace())
                    obj_class = getattr(caller_name, sub_class)(DeviceProxy=DeviceProxy)
                except:
                    try:
                        sub_class = ''.join(x for x in
                                            k.replace("-", "_").replace("list", "").replace("list",
                                                                                            "").title()
                                            if not x.isspace())
                        obj_class = getattr(caller_name, sub_class)(DeviceProxy=DeviceProxy)
                    except:
                        #(RAUNAK):Added one more check for the VLAN tagged_ethernet_list like cases
                        #Would check for the class with the name TaggedEthernet
                        try:
                            sub_class = ''.join(x for x in
                                                k.replace("-", "_").replace("list", "").replace(
                                                    "list",
                                                    "").title().replace('_', '')
                                                if not x.isspace())
                            obj_class = getattr(caller_name, sub_class)(DeviceProxy=DeviceProxy)
                        except:
                            #(Raunak):Added this for ClassList
                            try:
                                sub_class = ''.join(x for x in
                                                    k.replace("-list", "").title() + 'List'
                                                    if not x.isspace())
                                obj_class = getattr(caller_name, sub_class)(DeviceProxy=DeviceProxy)
                            except:
                                pass
        return obj_class
def Factory(self, DeviceProxy, obj, name, parent=''):
obj = self.convert(obj)
# for i in obj:
# self.default_object_key = i
if parent:
caller = inspect.getmodule(parent)
else:
caller = inspect.getmodule(self)
top_node_obj_list = []
for list_name_key, list_name_value in obj.items():
if list_name_key == 'class-list-list':
class_object = 'ClassList'
break
else:
class_object = ''.join(
x for x in list_name_key.replace("-", " ").replace(
"list", "").title() if not x.isspace())
if not class_object:
class_object = ''.join(
x for x in list_name_key.replace("-", " ").title() if not x.isspace())
class_object = kwbl().kwbl(class_object)
try:
caller = importlib.import_module(caller.__name__)
new_class = getattr(caller, class_object)(DeviceProxy=DeviceProxy)
except:
try:
caller = importlib.import_module(caller.__name__)
new_class = getattr(caller, class_object + 'List')(DeviceProxy=DeviceProxy)
except:
try:
caller = importlib.import_module(caller.__name__ + "_" + list_name_key.lower())
new_class = getattr(caller, class_object)(DeviceProxy=DeviceProxy)
except:
try:
caller = importlib.import_module(
caller.__name__ + "_" + list_name_key.replace('-', '_').lower())
new_class = getattr(caller, class_object)(DeviceProxy=DeviceProxy)
except:
try:
temp_name = list_name_key.split("-")
test_module = temp_name[0]
caller = importlib.import_module(caller.__name__ + "_" + test_module.lower())
new_class = getattr(caller, class_object)(DeviceProxy=DeviceProxy)
except:
try:
caller = importlib.import_module(
caller.__name__ + "_" + parent.lower() + "_" + list_name_key.lower())
new_class = getattr(caller, class_object)(DeviceProxy=DeviceProxy)
except:
try:
caller = importlib.import_module(caller.__name__ + "_" + parent.lower())
new_class = getattr(caller, class_object)(DeviceProxy=DeviceProxy)
except Exception as e:
try:
# try to import class from all sibling modules
found_in_sibling = False
for module in os.listdir(os.path.dirname(caller.__file__)):
if module.endswith("py") and module != "__init__.py":
try:
class_name = ''.join(
x for x in list_name_key.split("-").pop().title() if
not x.isspace())
caller_sibling_modules = importlib.import_module(
"." + module.replace(".py", ""), caller.__package__)
new_class = getattr(caller_sibling_modules, class_name)(
DeviceProxy=DeviceProxy)
found_in_sibling = True
break
except:
try:
class_name = ''.join(
x for x in list_name_key.replace("-", " ").title() if
not x.isspace())
caller_sibling_modules = importlib.import_module(
"." + module.replace(".py", ""), caller.__package__)
new_class = getattr(caller_sibling_modules, class_name)(
DeviceProxy=DeviceProxy)
found_in_sibling = True
break
except:
pass
pass
if found_in_sibling is False:
raise e
except:
return obj
# class_object
if isinstance(list_name_value, list):
for list_obj in list_name_value:
sdk_obj = copy.deepcopy(new_class)
for k, v in list_obj.items():
k = kwbl().kwbl(k)
new_obj_name = k.replace("-list", "", 1).replace("-", "_")
if new_obj_name[len(new_obj_name) - 1] == "_":
new_obj_name = ''.join(
x for x in k.replace("-", " ").replace("-list", "", 1).title() if
not x.isspace())
if isinstance(v, list):
obj_name_list = []
caller_name = self._search_for_child_object_module(caller, new_obj_name)
for keys in v:
obj_class = self._search_for_child_class_inside_module(k, caller_name, DeviceProxy)
# obj_class = getattr(caller_name, new_obj_name)(DeviceProxy=DeviceProxy)
for v_key, v_val in keys.items():
v_key = kwbl().kwbl(v_key)<|fim▁hole|> if v_key == "a10-url":
temp_v = urlparse(v_val)
v_val = temp_v.path
obj_class.__setattr__(v_key.replace("-", "_"), v_val)
obj_name_list.append(obj_class)
sdk_obj.__setattr__(k.replace("-", "_"), obj_name_list)
elif isinstance(v, dict):
sdk_obj.__setattr__(k.replace("-", "_"),
(sdk_obj.Factory(DeviceProxy, {k: v}, k, sdk_obj)))
else:
sdk_obj.__setattr__(new_obj_name.replace("-", "_"), v)
top_node_obj_list.append(copy.deepcopy(sdk_obj))
else:
with_native_list_in_name = ['access-list', 'ip-map-list', 'acl-id-list-list', 'acl-name-list-list',
'lsn-rule-list',
'inside-src-permit-list', 'nat-ip-list', 'inside-ip-list', 'ipv4-list']
for k, v in list_name_value.items():
k = kwbl().kwbl(k)
if k == "a10-url":
temp_v = urlparse(v)
v = temp_v.path
if k in with_native_list_in_name:
new_obj_name = with_native_list_in_name[with_native_list_in_name.index(k)].replace('-', '_')
# Bug 191464 Mike Thompson detecting type vs name.
elif not isinstance(v, list):
new_obj_name = k.replace("-", "_")
else:
new_obj_name = k.replace("-list", "", 1).replace("-", "_") if k.endswith('list') else k.replace(
"-", "_")
if isinstance(v, list):
obj_name_list = []
caller_name = self._search_for_child_object_module(caller, new_obj_name)
for keys in v:
obj_class = self._search_for_child_class_inside_module(k, caller_name, DeviceProxy)
for v_key, v_val in keys.items():
v_key = kwbl().kwbl(v_key)
if v_key == "a10-url":
temp_v = urlparse(v_val)
v_val = temp_v.path
elif isinstance(v_val, dict):
obj_class.__setattr__(v_key.replace("-", "_"), (
obj_class.Factory(DeviceProxy, {v_key: v_val}, v_key, obj_class)))
continue
elif isinstance(v_val, list):
sub_obj = obj_class.Factory(DeviceProxy, {v_key: v_val}, v_key, obj_class)
sub_obj_list = getattr(obj_class, v_key, [])
if isinstance(sub_obj, list):
sub_obj_list = sub_obj_list + sub_obj
else:
sub_obj_list.append(sub_obj)
setattr(obj_class, v_key.replace("-", "_"), sub_obj_list)
continue
obj_class.__setattr__(v_key.replace("-", "_"), v_val)
obj_name_list.append(obj_class)
new_class.__setattr__(k.replace("-", "_"), copy.deepcopy(obj_name_list))
elif isinstance(v, dict):
new_class.__setattr__(new_obj_name, (new_class.Factory(DeviceProxy, {k: v}, k, new_class)))
else:
new_class.__setattr__(new_obj_name, v)
try:
if top_node_obj_list and len(top_node_obj_list) > 0:
return top_node_obj_list
else:
return new_class
except:
pass
def depth_finder(self, d, depth=0):
if isinstance(d, list):
if not isinstance(d, dict) or not d or not isinstance(d, list):
return depth
elif isinstance(d, dict):
depth
if not isinstance(d, dict) or not d:
return depth
return max(self.depth_finder(v, depth + 1) for k, v in d.iteritems())
    def get(self, query_params=None, **kwargs):
        """GET this object from the device and rebuild SDK objects from the
        JSON response.

        kwargs fill {placeholders} in the URL; on error the object itself is
        returned with ERROR_MSG set.  With kwargs a single object is built;
        without kwargs a list is built when the response body is a list.
        """
        if len(kwargs) > 0:
            self.a10_url_update(**kwargs)
        else:
            self.a10_url_parent()
        request = self.DeviceProxy.GET(self, query_params)
        try:
            #Hack decode unicode something:zli, bug:237218
            hack = kwargs.get("json_before_load", None)
            if hack and callable(hack):
                try:
                    request = hack(request)
                except:
                    pass
            request = json.loads(request, encoding='utf-8')
        except:
            # json.loads failed -- classify the raw response.
            if request is None:
                self.ERROR_MSG = "None Returned"
                return self
            elif 'response' in request and request['response']['err']:
                self.ERROR_MSG = request
                return self
            elif "Session Timeout" in request:
                self.ERROR_MSG = request
                return self
        if request is None:
            self.ERROR_MSG = "None Returned"
            return self
        elif not isinstance(request, dict):
            self.ERROR_MSG = 'Invalid Response'
            return self
        # elif 'response' in request and request['response']['err']:
        elif 'response' in request and request.get('response').get('err', None):
            self.ERROR_MSG = request
            return self
        elif "Session Timeout" in request:
            self.ERROR_MSG = request
            return self
        if len(kwargs) > 0:
            temp_object = self.Factory(self.DeviceProxy, request, name=1)
            # Carry HTTP/debug metadata over onto the rebuilt object.
            try:
                temp_object.__setattr__("_HTTP_RESPONSE", self._HTTP_RESPONSE)
                temp_object.__setattr__("DEBUG_CONNECTION", self.DEBUG_CONNECTION)
                temp_object.__setattr__("DEBUG_Payload", self.DEBUG_Payload)
                temp_object.__setattr__("DEBUG_Response", self.DEBUG_Response)
                temp_object.__setattr__("DEBUG_URL", self.DEBUG_URL)
                temp_object.__setattr__("DEBUG_headers", self.DEBUG_headers)
            except Exception:
                pass
            return temp_object
        else:
            r_list = []
            for k, v in request.items():
                # Strip the first "-list" suffix to get the wrapper key.
                if "List" in k:
                    wrapper_key = k.replace("-list", "", 1)
                elif "list" in k:
                    wrapper_key = k.replace("-list", "", 1)
                else:
                    wrapper_key = k
                if wrapper_key[len(wrapper_key) - 1] == "_":
                    wrapper_key = wrapper_key[:len(wrapper_key) - 1]
                if isinstance(v, list):
                    for i in v:
                        new_object = self.Factory(self.DeviceProxy, {wrapper_key: i}, name=0, parent="")
                        new_object.__setattr__("_HTTP_RESPONSE", self._HTTP_RESPONSE)
                        new_object.__setattr__("DEBUG_CONNECTION", self.DEBUG_CONNECTION)
                        new_object.__setattr__("DEBUG_Payload", self.DEBUG_Payload)
                        new_object.__setattr__("DEBUG_Response", self.DEBUG_Response)
                        new_object.__setattr__("DEBUG_URL", self.DEBUG_URL)
                        new_object.__setattr__("DEBUG_headers", self.DEBUG_headers)
                        r_list.append(new_object)
                else:
                    # Single-object body: build once and return immediately.
                    try:
                        new_object = self.Factory(self.DeviceProxy, request, name=1)
                        new_object.__setattr__("_HTTP_RESPONSE", self._HTTP_RESPONSE)
                        new_object.__setattr__("DEBUG_CONNECTION", self.DEBUG_CONNECTION)
                        new_object.__setattr__("DEBUG_Payload", self.DEBUG_Payload)
                        new_object.__setattr__("DEBUG_Response", self.DEBUG_Response)
                        new_object.__setattr__("DEBUG_URL", self.DEBUG_URL)
                        new_object.__setattr__("DEBUG_headers", self.DEBUG_headers)
                    except:
                        pass
                    return new_object
            return r_list
def get_stream_response(self, **kwargs):
A10BaseClass.is_POST = True
o_url = self.a10_url
if len(kwargs) > 0:
self.a10_url_update(**kwargs)
else:
self.a10_url_parent()
response = self.DeviceProxy.POST(self)
self.a10_url = o_url
return response
    def create(self, **kwargs):
        """POST this object to the device (create).  kwargs fill URL
        {placeholders}; the object's URL is restored before returning."""
        A10BaseClass.is_POST = True
        o_url = self.a10_url
        if len(kwargs) > 0:
            self.a10_url_update(**kwargs)
        else:
            self.a10_url_parent()
        response = self.response_handler(self.DeviceProxy.POST(self))
        self.a10_url = o_url
        return response
    def update(self, **kwargs):
        """POST this object to the device (update).

        NOTE(review): byte-identical to create() -- it POSTs rather than
        PUTs (PUT is only used by replace()); confirm this is intended.
        """
        A10BaseClass.is_POST = True
        o_url = self.a10_url
        if len(kwargs) > 0:
            self.a10_url_update(**kwargs)
        else:
            self.a10_url_parent()
        response = self.response_handler(self.DeviceProxy.POST(self))
        self.a10_url = o_url
        return response
    def replace(self, **kwargs):
        """PUT this object to the device (full replace).  is_POST is cleared
        so None-valued attributes are omitted from the payload."""
        A10BaseClass.is_POST = False
        o_url = self.a10_url
        if len(kwargs) > 0:
            self.a10_url_update(**kwargs)
        else:
            self.a10_url_parent()
        response = self.response_handler(self.DeviceProxy.PUT(self))
        self.a10_url = o_url
        return response
    def replace_all(self, obj_list):
        """PUT a list of objects to this object's parent collection URL."""
        A10BaseClass.is_POST = False
        o_url = self.a10_url
        self.a10_url_parent()
        response = self.response_handler(self.DeviceProxy.PUT_ALL(self, obj_list))
        self.a10_url = o_url
        return response
    def create_all(self, obj_list):
        """POST a list of objects to this object's parent collection URL."""
        A10BaseClass.is_POST = True
        o_url = self.a10_url
        self.a10_url_parent()
        response = self.response_handler(self.DeviceProxy.POST_ALL(self, obj_list))
        self.a10_url = o_url
        return response
    def delete(self, query_params=None, **kwargs):
        """DELETE this object on the device.  kwargs fill URL {placeholders};
        the object's URL is restored before returning."""
        o_url = self.a10_url
        if len(kwargs) > 0:
            self.a10_url_update(**kwargs)
        else:
            self.a10_url_parent()
        response = self.response_handler(self.DeviceProxy.DELETE(self, query_params))
        self.a10_url = o_url
        return response
    def a10_url_update(self, **kwargs):
        """Fill {placeholder} tokens in a10_url with percent-encoded kwargs.

        Unresolved placeholders are stripped, stray '+' separators cleaned
        up; on any failure falls back to a10_url_parent(), and finally
        restores the original URL if that fails too.
        """
        temp_url = self.a10_url
        try:
            for key, value in kwargs.items():
                if isinstance(value, tuple) or isinstance(value, list):
                    # Sequence values fill repeated {key} occurrences in order.
                    for v in value:
                        v = urllib.quote_plus(str(v).replace(' ', "%20"))
                        v = v.replace('%2520', '%20')
                        self.a10_url = self.a10_url.replace('{%s}' % key, v, 1)
                else:
                    #Python converts white spaces to +
                    #Modifying this behavior to use %20 encoding instead
                    value = urllib.quote_plus(str(value).replace(' ', '%20'))
                    value = value.replace('%2520', '%20')
                    self.a10_url = self.a10_url.replace('{%s}' % key, value, 1)
            #Removing any unresolved keys and removing the +
            while(len(kwargs)> 0 and '{' in self.a10_url):
                try:
                    start_index = self.a10_url.index('{')
                    end_index = self.a10_url.index('}')
                    self.a10_url = self.a10_url.replace(self.a10_url[start_index:end_index+1], '')
                except ValueError as e:
                    print 'Substring not found', e
                    break
            self.a10_url = self.a10_url.replace('+/', '/')
            self.a10_url = self.a10_url.replace('/+', '/')
            #zli fixed bug:243614
            p = re.compile('\+$')
            #self.a10_url = self.a10_url.replace('+', '', -1) if self.a10_url.endswith('+') else self.a10_url
            self.a10_url = p.sub('', self.a10_url) if self.a10_url.endswith('+') else self.a10_url
            if '{' in self.a10_url:
                self.a10_url_parent(**kwargs)
        except:
            try:
                self.a10_url_parent(**kwargs)
            except:
                self.a10_url = temp_url
    def a10_url_override(self, url):
        """Replace this object's endpoint URL wholesale (no validation)."""
        self.a10_url = url
#TODO: Need to build a more intelligent handler.
    def a10_url_parent(self, **kwargs):
        """Rewrite a10_url to the parent collection when it still has
        unresolved {placeholders}; for oper/stats URLs the segment before the
        suffix is dropped instead.  kwargs then fill remaining placeholders.
        """
        if "{" in self.a10_url:
            p_list = self.a10_url.split("/")
            if self.a10_url.endswith('oper'):
                temp = self.a10_url.replace(p_list[(len(p_list) - 2)] + '/', "")
                self.a10_url = temp
            elif self.a10_url.endswith('stats'):
                temp = self.a10_url.replace(p_list[(len(p_list) - 2)] + '/', "")
                self.a10_url = temp
            else:
                # Drop the last path segment and its trailing slash.
                temp = self.a10_url.replace(p_list[(len(p_list) - 1)], "")
                self.a10_url = temp[0:len(temp) - 1]
        if len(kwargs) > 0:
            try:
                self.a10_url = self.a10_url.format(**kwargs)
            except:
                pass
def get_stats(self, url="", Filters={}, **kwargs):
if url == "":
o_url = self.a10_url
else:
o_url = self.a10_url
self.a10_url = url
if len(kwargs) > 0:
try:
self.a10_url = self.a10_url.format(**kwargs)
except:
pass
"""
If you are utilizing the stats class &| you want to get stats from a parent.
"""
if "{" in self.a10_url:
self.a10_url = re.match(r'^(/.+?/{)', self.a10_url).group(0).replace("/{", "")
if "stats" not in self.a10_url:
self.a10_url = self.a10_url + "/stats"
if len(Filters) > 0:
query = urllib.urlencode(**Filters)
endpoint = self.a10_url + "?" + query
temp_url = self.a10_url
self.a10_url = endpoint
response = self.response_handler(self.DeviceProxy.GET(self), False)
self.a10_url = o_url
return response
def del_stats(self, url="", Filters={}, **kwargs):
if url == "":
o_url = self.a10_url
else:
o_url = self.a10_url
self.a10_url = url
if len(kwargs) > 0:
try:
self.a10_url = self.a10_url.format(**kwargs)
except:
pass
"""
If you are utilizing the stats class &| you want to get stats from a parent.
"""
if "{" in self.a10_url:
self.a10_url = re.match(r'^(/.+?/{)', self.a10_url).group(0).replace("/{", "")
if "stats" not in self.a10_url:
self.a10_url = self.a10_url + "/stats"
if len(Filters) > 0:
query = urllib.urlencode(**Filters)
endpoint = self.a10_url + "?" + query
temp_url = self.a10_url
self.a10_url = endpoint
response = self.response_handler(self.DeviceProxy.DELETE(self), False)
self.a10_url = o_url
return response
def get_oper(self, url="", Filters={}, **kwargs):
if url == "":
o_url = self.a10_url
else:
o_url = self.a10_url
self.a10_url = url
if len(kwargs) > 0:
try:
self.a10_url = self.a10_url.format(**kwargs)
except:
pass
"""
If you are utilizing the stats class &| you want to get stats from a parent.
"""
if "{" in self.a10_url:
self.a10_url = re.match(r'^(/.+?/{)', self.a10_url).group(0).replace("/{", "")
if "oper" not in self.a10_url:
self.a10_url = self.a10_url + "/oper"
if len(Filters) > 0:
query = urllib.urlencode(**Filters)
endpoint = self.a10_url + "?" + query
temp_url = self.a10_url
self.a10_url = endpoint
response = self.response_handler(self.DeviceProxy.GET(self), False)
self.a10_url = o_url
return response
    def response_handler(self, response, r_obj=True):
        """Classify a raw device response.

        Returns ``self`` (with ERROR_MSG set on failure) for object-style
        calls, or the parsed response when *r_obj* is False.

        NOTE(review): when the body carries a 'response' dict whose status is
        not "OK", no branch returns and the method implicitly yields None --
        confirm callers tolerate that.
        """
        try:
            response = json.loads(response, encoding='utf-8')
        except:
            pass
        try:
            if response is None:
                self.ERROR_MSG = "None Returned"
                return self
            elif 'response' in response:
                if "err" in response['response']:
                    self.ERROR_MSG = response
                    return self
                elif "status" in response['response']:
                    if response['response']['status'] == "OK":
                        return self
            elif "Session Timeout" in response:
                self.ERROR_MSG = response
                return self
            else:
                if r_obj == True:
                    self.ERROR_MSG = ""
                    return self
                else:
                    return response
        except:
            # Membership tests above can raise on non-container bodies.
            self.ERROR_MSG = response
            return self
    #Helper method for single file upload
    def file_upload(self, filename=None, file_handle=None, file_obj=None):
        """Upload one file via multipart POST with action 'import'."""
        u_fields = [("json", {self.b_key:{"file":filename,
                                          "file-handle":file_handle,
                                          "action":'import'}})]
        u_files = [(filename, file_handle, file_obj)]
        return self.files_upload(u_fields, u_files)
    #Added to address file_upload and downloads
    def files_upload(self, fields=[], files=[]):
        '''
        API only supports one file being uploaded at a time.
        :params fields: [("json", {self.b_key:{"file":filename,
                                "file-handle":file_handle,
                                "action":action}})]
        :params files:[("file", filename, file_obj)]

        The field/file tuples are stashed on the instance for the proxy's
        multipart encoder.  (The mutable defaults are never mutated here.)
        '''
        self.fields = fields
        self.files = files
        return self.DeviceProxy.post_multipart(self)
    def file_download(self, name=None):
        '''
        This bypasses the factory so the raw file content can be returned.
        Note: a10_url is permanently extended with /<name> (not restored).
        '''
        self.a10_url = self.a10_url + "/" + name
        return self.DeviceProxy.multi_part_get(self)
    def file_replace(self, filename=None, file_handle=None, file_obj=None):
        """Replace a file via multipart POST.

        NOTE(review): byte-identical to file_upload(), including
        action='import' -- a replace action might be intended; confirm
        against the AXAPI file-object spec.
        """
        u_fields = [("json", {self.b_key:{"file":filename,
                                          "file-handle":file_handle,
                                          "action":'import'}})]
        u_files = [(filename, file_handle, file_obj)]
        return self.files_upload(u_fields, u_files)
    def file_delete(self, name=None):
        """DELETE the named file object (a10_url is extended, not restored)."""
        self.a10_url = self.a10_url + "/" + name
        return self.DeviceProxy.DELETE(self)
class kwbl():
    """Keyword-blacklist helper.

    Mangles attribute names that would collide with Python built-ins or
    keywords, contain 'copy', or start with a digit (illegal identifier):

        kwbl("list")    -> "A10WW_list"   (key == 0, attribute-style prefix)
        kwbl("list", 1) -> "A10_list"     (key != 0, alternate prefix)
        kwbl("safe")    -> "safe"         (no collision, returned unchanged)
    """

    # Explicit snapshot of Python 2 built-in names, kept because
    # dir(__builtins__) varies between module and dict exposure
    # (echou/raunak notes in the original).  frozenset gives O(1) membership
    # versus the original O(n) list scan.
    _PY_BUILTINS = frozenset([
        'bytearray', 'IndexError', 'all', 'help', 'vars', 'SyntaxError',
        'unicode', 'UnicodeDecodeError', 'memoryview', 'isinstance',
        'copyright', 'NameError', 'BytesWarning', 'dict', 'IOError', 'oct',
        'bin', 'SystemExit', 'StandardError', 'format', 'TabError', 'sorted',
        'False', 'RuntimeWarning', 'list', 'iter', 'reload', 'Warning',
        '__package__', 'round', 'dir', 'cmp', 'set', 'bytes',
        'UnicodeTranslateError', 'intern', 'issubclass', 'Ellipsis',
        'EOFError', 'locals', 'BufferError', 'slice', 'FloatingPointError',
        'sum', 'getattr', 'abs', 'exit', 'print', 'True', 'FutureWarning',
        'ImportWarning', 'None', 'hash', 'ReferenceError', 'len', 'credits',
        'frozenset', '__name__', 'ord', 'super', '_', 'TypeError', 'license',
        'KeyboardInterrupt', 'UserWarning', 'filter', 'range', 'staticmethod',
        'SystemError', 'BaseException', 'pow', 'RuntimeError', 'float',
        'GeneratorExit', 'StopIteration', 'globals', 'divmod', 'enumerate',
        'apply', 'LookupError', 'open', 'quit', 'basestring', 'UnicodeError',
        'zip', 'hex', 'long', 'next', 'ImportError', 'chr', '__import__',
        'type', 'Exception', 'tuple', 'reduce', 'reversed',
        'UnicodeEncodeError', 'input', 'hasattr', 'delattr', 'setattr',
        'raw_input', 'PendingDeprecationWarning', 'compile',
        'ArithmeticError', 'str', 'property', 'MemoryError', 'int', 'xrange',
        'KeyError', 'coerce', 'SyntaxWarning', 'file', 'EnvironmentError',
        'unichr', 'id', 'OSError', 'DeprecationWarning', 'min',
        'UnicodeWarning', 'execfile', 'any', 'complex', 'bool', 'ValueError',
        'NotImplemented', 'map', 'buffer', 'max', 'object', 'repr',
        'callable', 'ZeroDivisionError', 'eval', '__debug__',
        'IndentationError', 'AssertionError', 'classmethod',
        'UnboundLocalError', 'NotImplementedError', 'AttributeError',
        'OverflowError'])

    def kwbl(self, word, key=0):
        """Return *word*, prefixed when it needs mangling.

        The original had four duplicated branches each returning the same
        prefixed value plus dead commented-out sys.modules checks; collapsed
        into a single condition with identical behavior.
        """
        needs_prefix = (
            word in dir(__builtins__) or
            'copy' in word or
            word in self._PY_BUILTINS or
            word in keyword.kwlist or
            re.search(r'^[0-9]', word) is not None
        )
        if not needs_prefix:
            return word
        return ("A10WW_" if key == 0 else "A10_") + word
| |
<|file_name|>NewContact.py<|end_file_name|><|fim▁begin|>from models.contact import Contacts
from data.data_for_contacts import constant as testdata
import pytest
def test_NewContact(app, db, json_contacts):
contact = json_contacts
old_contacts = db.get_contact_list()<|fim▁hole|> app.contacts.create_contact(contact)
new_contacts = db.get_contact_list()
old_contacts.append(contact)
assert sorted(old_contacts, key=Contacts.contact_id_or_max) == sorted(new_contacts, key=Contacts.contact_id_or_max)<|fim▁end|>
| |
<|file_name|>fake_dbclient.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Vitess Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.<|fim▁hole|>Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package binlogplayer
import (
"fmt"
"strings"
"vitess.io/vitess/go/sqltypes"
)
// fakeDBClient is a stateless stand-in for a real DB connection; every
// method returns a canned success value.
type fakeDBClient struct {
}
// NewFakeDBClient returns a fake DBClient. Its functions return
// preset responses to requests; no real connection is ever made.
func NewFakeDBClient() DBClient {
	return &fakeDBClient{}
}
// DBName returns a static fake database name.
func (dc *fakeDBClient) DBName() string {
	return "db"
}
// Connect is a no-op that always reports success.
func (dc *fakeDBClient) Connect() error {
	return nil
}
// Begin is a no-op that always reports success.
func (dc *fakeDBClient) Begin() error {
	return nil
}
// Commit is a no-op that always reports success.
func (dc *fakeDBClient) Commit() error {
	return nil
}
// Rollback is a no-op that always reports success.
func (dc *fakeDBClient) Rollback() error {
	return nil
}
// Close is a no-op; there is no underlying connection to release.
func (dc *fakeDBClient) Close() {
}
// ExecuteFetch returns a canned result keyed off the leading SQL keyword of
// the (lower-cased) query. maxrows is accepted for interface compatibility
// but ignored. Unrecognized statements yield an "unexpected" error.
func (dc *fakeDBClient) ExecuteFetch(query string, maxrows int) (qr *sqltypes.Result, err error) {
	query = strings.ToLower(query)
	switch {
	case strings.HasPrefix(query, "insert"):
		return &sqltypes.Result{InsertID: 1}, nil
	case strings.HasPrefix(query, "update"):
		return &sqltypes.Result{RowsAffected: 1}, nil
	case strings.HasPrefix(query, "delete"):
		return &sqltypes.Result{RowsAffected: 1}, nil
	case strings.HasPrefix(query, "select"):
		// Filtered selects get one fake vreplication-style row; unfiltered
		// selects get an empty result.
		if strings.Contains(query, "where") {
			return sqltypes.MakeTestResult(
				sqltypes.MakeTestFields(
					"id|state|source",
					"int64|varchar|varchar",
				),
				`1|Running|keyspace:"ks" shard:"0" key_range:<end:"\200" > `,
			), nil
		}
		return &sqltypes.Result{}, nil
	case strings.HasPrefix(query, "use"):
		return &sqltypes.Result{}, nil
	}
	return nil, fmt.Errorf("unexpected: %v", query)
}
|
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
|
<|file_name|>msg.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2007-2016 Contributors as noted in the AUTHORS file
This file is part of libzmq, the ZeroMQ core engine in C++.
libzmq is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
As a special exception, the Contributors give you permission to link
this library with independent modules to produce an executable,
regardless of the license terms of these independent modules, and to
copy and distribute the resulting executable under terms of your choice,
provided that you also meet, for each linked independent module, the
terms and conditions of the license of that module. An independent
module is a module which is not derived from or based on this library.
If you modify this library, you must extend this exception to your
version of the library.
libzmq is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "macros.hpp"
#include "msg.hpp"
#include "../include/zmq.h"
#include <string.h>
#include <stdlib.h>
#include <new>
#include "stdint.hpp"
#include "likely.hpp"
#include "metadata.hpp"
#include "err.hpp"
// Check whether the sizes of public representation of the message (zmq_msg_t)
// and private representation of the message (zmq::msg_t) match.
typedef char zmq_msg_size_check
[2 * ((sizeof (zmq::msg_t) == sizeof (zmq_msg_t)) != 0) - 1];
//  Sanity check: the message's type tag must lie in the valid range,
//  i.e. the union has been initialized by one of the init_* functions.
bool zmq::msg_t::check ()
{
    return u.base.type >= type_min && u.base.type <= type_max;
}
//  Initialize the message from a caller-supplied buffer, choosing the
//  cheapest storage: copy into the in-struct VSM buffer for small sizes,
//  wrap caller-owned storage (zero-copy) when content_ is given, otherwise
//  take ownership via init_data.  Returns 0 on success, -1 on failure.
//  NOTE(review): this uses size_ < max_vsm_size while init_size uses <= --
//  a size of exactly max_vsm_size bypasses the VSM fast path here; confirm
//  this asymmetry is intentional (it matches upstream ordering).
int zmq::msg_t::init (void* data_, size_t size_,
                      msg_free_fn* ffn_, void* hint,
                      content_t* content_)
{
    if (size_ < max_vsm_size) {
        int const rc = init_size(size_);
        if (rc != -1)
        {
            memcpy(data(), data_, size_);
            return 0;
        }
        else
        {
            return -1;
        }
    }
    else if(content_)
    {
        return init_external_storage(content_, data_, size_, ffn_, hint);
    }
    else
    {
        return init_data(data_, size_, ffn_, hint);
    }
}
//  Initialize an empty (zero-length) very-small message.  Always succeeds.
int zmq::msg_t::init ()
{
    u.vsm.metadata = NULL;
    u.vsm.type = type_vsm;
    u.vsm.flags = 0;
    u.vsm.size = 0;
    u.vsm.group[0] = '\0';
    u.vsm.routing_id = 0;
    u.vsm.fd = retired_fd;
    return 0;
}
//  Initialize an uninitialized-content message of the given size.  Sizes up
//  to max_vsm_size live inline in the struct (VSM); larger sizes allocate a
//  single heap block holding the refcounted content_t header plus the data.
//  Returns 0 on success, -1 with errno=ENOMEM on allocation failure.
int zmq::msg_t::init_size (size_t size_)
{
    if (size_ <= max_vsm_size) {
        u.vsm.metadata = NULL;
        u.vsm.type = type_vsm;
        u.vsm.flags = 0;
        u.vsm.size = (unsigned char) size_;
        u.vsm.group[0] = '\0';
        u.vsm.routing_id = 0;
        u.vsm.fd = retired_fd;
    }
    else {
        u.lmsg.metadata = NULL;
        u.lmsg.type = type_lmsg;
        u.lmsg.flags = 0;
        u.lmsg.group[0] = '\0';
        u.lmsg.routing_id = 0;
        u.lmsg.fd = retired_fd;
        u.lmsg.content = NULL;
        //  Guard against size_t overflow of header + payload before malloc.
        if (sizeof (content_t) + size_ > size_)
            u.lmsg.content = (content_t*) malloc (sizeof (content_t) + size_);
        if (unlikely (!u.lmsg.content)) {
            errno = ENOMEM;
            return -1;
        }
        //  Payload immediately follows the content_t header in the block.
        u.lmsg.content->data = u.lmsg.content + 1;
        u.lmsg.content->size = size_;
        u.lmsg.content->ffn = NULL;
        u.lmsg.content->hint = NULL;
        new (&u.lmsg.content->refcnt) zmq::atomic_counter_t ();
    }
    return 0;
}
//  Initialise a zero-copy message whose payload buffer and content_t
//  header are both owned by the caller.  ffn_ (with hint_) is invoked
//  when the last reference to the content is released.
int zmq::msg_t::init_external_storage(content_t* content_, void* data_, size_t size_,
                        msg_free_fn *ffn_, void* hint_)
{
    zmq_assert(NULL != data_);
    zmq_assert(NULL != content_);
    u.zclmsg.metadata = NULL;
    u.zclmsg.type = type_zclmsg;
    u.zclmsg.flags = 0;
    u.zclmsg.group[0] = '\0';
    u.zclmsg.routing_id = 0;
    u.zclmsg.fd = retired_fd;
    u.zclmsg.content = content_;
    u.zclmsg.content->data = data_;
    u.zclmsg.content->size = size_;
    u.zclmsg.content->ffn = ffn_;
    u.zclmsg.content->hint = hint_;
    //  The counter lives inside caller-provided storage, hence the
    //  explicit placement new.
    new (&u.zclmsg.content->refcnt) zmq::atomic_counter_t();
    return 0;
}
//  Initialise the message around a caller-supplied buffer.  If ffn_ is
//  NULL the buffer is treated as constant and merely referenced (cmsg);
//  otherwise ownership transfers to the message and ffn_ (with hint_)
//  is invoked on release.  Returns 0 on success, -1/ENOMEM on failure.
int zmq::msg_t::init_data (void *data_, size_t size_,
                           msg_free_fn *ffn_, void *hint_)
{
    //  If data is NULL and size is not 0, a segfault
    //  would occur once the data is accessed
    zmq_assert (data_ != NULL || size_ == 0);
    //  Initialize constant message if there's no need to deallocate
    if (ffn_ == NULL) {
        u.cmsg.metadata = NULL;
        u.cmsg.type = type_cmsg;
        u.cmsg.flags = 0;
        u.cmsg.data = data_;
        u.cmsg.size = size_;
        u.cmsg.group[0] = '\0';
        u.cmsg.routing_id = 0;
        u.cmsg.fd = retired_fd;
    }
    else {
        //  Long message wrapping the caller's buffer: only the content
        //  header is allocated here; the payload stays where it is.
        u.lmsg.metadata = NULL;
        u.lmsg.type = type_lmsg;
        u.lmsg.flags = 0;
        u.lmsg.group[0] = '\0';
        u.lmsg.routing_id = 0;
        u.lmsg.fd = retired_fd;
        u.lmsg.content = (content_t*) malloc (sizeof (content_t));
        if (!u.lmsg.content) {
            errno = ENOMEM;
            return -1;
        }
        u.lmsg.content->data = data_;
        u.lmsg.content->size = size_;
        u.lmsg.content->ffn = ffn_;
        u.lmsg.content->hint = hint_;
        //  Counter is constructed in malloc'd memory via placement new.
        new (&u.lmsg.content->refcnt) zmq::atomic_counter_t ();
    }
    return 0;
}
//  The three initialisers below create payload-less signal messages;
//  they differ only in the type tag they stamp onto the message.
int zmq::msg_t::init_delimiter ()
{
    u.delimiter.type = type_delimiter;
    u.delimiter.metadata = NULL;
    u.delimiter.flags = 0;
    u.delimiter.group[0] = '\0';
    u.delimiter.routing_id = 0;
    u.delimiter.fd = retired_fd;
    return 0;
}

int zmq::msg_t::init_join ()
{
    u.base.type = type_join;
    u.base.metadata = NULL;
    u.base.flags = 0;
    u.base.group[0] = '\0';
    u.base.routing_id = 0;
    u.base.fd = retired_fd;
    return 0;
}

int zmq::msg_t::init_leave ()
{
    u.base.type = type_leave;
    u.base.metadata = NULL;
    u.base.flags = 0;
    u.base.group[0] = '\0';
    u.base.routing_id = 0;
    u.base.fd = retired_fd;
    return 0;
}
//  Releases all resources held by the message (payload content and
//  metadata) and marks it invalid.  Returns -1 with errno set to EFAULT
//  if the message is malformed.
int zmq::msg_t::close ()
{
    // Check the validity of the message.
    if (unlikely (!check ())) {
        errno = EFAULT;
        return -1;
    }
    if (u.base.type == type_lmsg) {
        // If the content is not shared, or if it is shared and the reference
        // count has dropped to zero, deallocate it.
        if (!(u.lmsg.flags & msg_t::shared) ||
            !u.lmsg.content->refcnt.sub (1)) {
            // We used "placement new" operator to initialize the reference
            // counter so we call the destructor explicitly now.
            u.lmsg.content->refcnt.~atomic_counter_t ();
            if (u.lmsg.content->ffn)
                u.lmsg.content->ffn (u.lmsg.content->data,
                    u.lmsg.content->hint);
            free (u.lmsg.content);
        }
    }
    if (is_zcmsg())
    {
        // Zero-copy messages must always carry a free callback.
        zmq_assert( u.zclmsg.content->ffn );
        // If the content is not shared, or if it is shared and the reference
        // count has dropped to zero, deallocate it.
        if (!(u.zclmsg.flags & msg_t::shared) ||
            !u.zclmsg.content->refcnt.sub (1)) {
            // We used "placement new" operator to initialize the reference
            // counter so we call the destructor explicitly now.
            u.zclmsg.content->refcnt.~atomic_counter_t ();
            // The content struct itself is caller-owned storage, so only
            // the callback runs here -- no free() of the struct.
            u.zclmsg.content->ffn (u.zclmsg.content->data,
                          u.zclmsg.content->hint);
        }
    }
    if (u.base.metadata != NULL) {
        // Drop our reference on the metadata; delete it when we held the
        // last one.
        if (u.base.metadata->drop_ref ()) {
            LIBZMQ_DELETE(u.base.metadata);
        }
        u.base.metadata = NULL;
    }
    // Make the message invalid.
    u.base.type = 0;
    return 0;
}
//  Moves the content of src_ into this message.  The destination is
//  closed first; afterwards src_ is re-initialised as an empty message.
int zmq::msg_t::move (msg_t &src_)
{
    //  Refuse to move from a malformed message.
    if (unlikely (!src_.check ())) {
        errno = EFAULT;
        return -1;
    }

    //  Release whatever this message currently holds.
    const int close_rc = close ();
    if (unlikely (close_rc < 0))
        return close_rc;

    //  Transfer ownership, then reset the source to a fresh empty message.
    *this = src_;
    const int init_rc = src_.init ();
    if (unlikely (init_rc < 0))
        return init_rc;

    return 0;
}
//  Makes this message a copy of src_.  The payload of long and zero-copy
//  messages is shared, not duplicated: the source is promoted to a
//  shared message and the reference counter is bumped accordingly.
int zmq::msg_t::copy (msg_t &src_)
{
    // Check the validity of the source.
    if (unlikely (!src_.check ())) {
        errno = EFAULT;
        return -1;
    }
    // Release whatever this message currently holds.
    int rc = close ();
    if (unlikely (rc < 0))
        return rc;
    if (src_.u.base.type == type_lmsg ) {
        // One reference is added to shared messages. Non-shared messages
        // are turned into shared messages and reference count is set to 2.
        if (src_.u.lmsg.flags & msg_t::shared)
            src_.u.lmsg.content->refcnt.add (1);
        else {
            src_.u.lmsg.flags |= msg_t::shared;
            src_.u.lmsg.content->refcnt.set (2);
        }
    }
    if (src_.is_zcmsg()) {
        // One reference is added to shared messages. Non-shared messages
        // are turned into shared messages and reference count is set to 2.
        if (src_.u.zclmsg.flags & msg_t::shared)
            src_.refcnt()->add (1);
        else {
            src_.u.zclmsg.flags |= msg_t::shared;
            src_.refcnt()->set (2);
        }
    }
    // The copy shares the metadata object as well.
    if (src_.u.base.metadata != NULL)
        src_.u.base.metadata->add_ref ();
    // Shallow copy of the whole union is sufficient now that all shared
    // state is reference counted.
    *this = src_;
    return 0;
}
//  Returns a pointer to the message payload.  Where the payload lives
//  depends on the message type: inline for VSM, behind the content
//  header for long/zero-copy messages, caller-owned for cmsg.
void *zmq::msg_t::data ()
{
    //  Data may only be requested from a well-formed message.
    zmq_assert (check ());

    switch (u.base.type) {
        case type_vsm:
            return u.vsm.data;
        case type_cmsg:
            return u.cmsg.data;
        case type_lmsg:
            return u.lmsg.content->data;
        case type_zclmsg:
            return u.zclmsg.content->data;
        default:
            zmq_assert (false);
            return NULL;
    }
}
//  Returns the size of the payload in bytes; the location of the size
//  field depends on the message type.
size_t zmq::msg_t::size ()
{
    //  Size may only be requested from a well-formed message.
    zmq_assert (check ());

    switch (u.base.type) {
        case type_vsm:
            return u.vsm.size;
        case type_cmsg:
            return u.cmsg.size;
        case type_lmsg:
            return u.lmsg.content->size;
        case type_zclmsg:
            return u.zclmsg.content->size;
        default:
            zmq_assert (false);
            return 0;
    }
}
//  Returns the current flag byte.
unsigned char zmq::msg_t::flags ()
{
    return u.base.flags;
}
//  ORs the given bits into the flag byte (does not assign the whole byte).
void zmq::msg_t::set_flags (unsigned char flags_)
{
    u.base.flags |= flags_;
}
//  Clears exactly the given bits from the flag byte.
void zmq::msg_t::reset_flags (unsigned char flags_)
{
    u.base.flags &= ~flags_;
}
//  File-descriptor slot accessors.
zmq::fd_t zmq::msg_t::fd ()
{
    return u.base.fd;
}
void zmq::msg_t::set_fd (fd_t fd_)
{
    u.base.fd = fd_;
}
//  Returns the metadata attached to the message, or NULL.
zmq::metadata_t *zmq::msg_t::metadata () const
{
    return u.base.metadata;
}
//  Attaches metadata to the message and takes a reference on it.  May
//  only be called when no metadata is attached yet (the old pointer
//  would leak otherwise).
void zmq::msg_t::set_metadata (zmq::metadata_t *metadata_)
{
    assert (metadata_ != NULL);
    assert (u.base.metadata == NULL);
    metadata_->add_ref ();
    u.base.metadata = metadata_;
}
//  Drops this message's reference on the metadata, deleting the object
//  when the last reference is gone.
void zmq::msg_t::reset_metadata ()
{
    if (u.base.metadata) {
        if (u.base.metadata->drop_ref ()) {
            LIBZMQ_DELETE(u.base.metadata);
        }
        u.base.metadata = NULL;
    }
}
//  Flag predicates: the "== flag" form checks that *all* bits of the
//  flag constant are set.
bool zmq::msg_t::is_identity () const
{
    return (u.base.flags & identity) == identity;
}
bool zmq::msg_t::is_credential () const
{
    return (u.base.flags & credential) == credential;
}
//  Type predicates: one per message type tag.
bool zmq::msg_t::is_delimiter () const
{
    return u.base.type == type_delimiter;
}
bool zmq::msg_t::is_vsm () const
{
    return u.base.type == type_vsm;
}
bool zmq::msg_t::is_cmsg () const
{
    return u.base.type == type_cmsg;
}
bool zmq::msg_t::is_zcmsg() const
{
    return u.base.type == type_zclmsg;
}
bool zmq::msg_t::is_join() const
{
    return u.base.type == type_join;
}
bool zmq::msg_t::is_leave() const
{
    return u.base.type == type_leave;
}
//  Adds refs_ additional references to the message.  A non-shared
//  long/zero-copy message is promoted to shared with the counter set to
//  refs_ + 1 (the +1 accounts for the existing owner).
void zmq::msg_t::add_refs (int refs_)
{
    zmq_assert (refs_ >= 0);
    // Operation not supported for messages with metadata.
    zmq_assert (u.base.metadata == NULL);
    // No copies required.
    if (!refs_)
        return;
    // VSMs, CMSGS and delimiters can be copied straight away. The only
    // message type that needs special care are long messages.
    if (u.base.type == type_lmsg || is_zcmsg() ) {
        if (u.base.flags & msg_t::shared)
            refcnt()->add (refs_);
        else {
            refcnt()->set (refs_ + 1);
            u.base.flags |= msg_t::shared;
        }
    }
}
//  Removes refs_ references from the message.  Returns false when the
//  last reference was dropped (the message was closed/deallocated),
//  true when references remain.
//  NOTE: a stray editor placeholder token that had been spliced into
//  this function (breaking compilation) has been removed; the logic is
//  unchanged.
bool zmq::msg_t::rm_refs (int refs_)
{
    zmq_assert (refs_ >= 0);

    // Operation not supported for messages with metadata.
    zmq_assert (u.base.metadata == NULL);

    // No copies required.
    if (!refs_)
        return true;

    // If there's only one reference close the message.
    if ( (u.base.type != type_zclmsg && u.base.type != type_lmsg) || !(u.base.flags & msg_t::shared)) {
        close ();
        return false;
    }

    // The only message type that needs special care are long and zcopy messages.
    if (u.base.type == type_lmsg && !u.lmsg.content->refcnt.sub(refs_)) {
        // We used "placement new" operator to initialize the reference
        // counter so we call the destructor explicitly now.
        u.lmsg.content->refcnt.~atomic_counter_t ();

        if (u.lmsg.content->ffn)
            u.lmsg.content->ffn (u.lmsg.content->data, u.lmsg.content->hint);
        free (u.lmsg.content);

        return false;
    }

    if (is_zcmsg() && !u.zclmsg.content->refcnt.sub(refs_)) {
        // Storage for refcnt is provided externally, so the content
        // struct is not freed here -- only the free callback runs.
        if (u.zclmsg.content->ffn) {
            u.zclmsg.content->ffn(u.zclmsg.content->data, u.zclmsg.content->hint);
        }

        return false;
    }

    return true;
}
//  Routing-id accessors.
uint32_t zmq::msg_t::get_routing_id ()
{
    return u.base.routing_id;
}

//  Zero is reserved to mean "no routing id", so it is rejected.
int zmq::msg_t::set_routing_id (uint32_t routing_id_)
{
    if (routing_id_ == 0) {
        errno = EINVAL;
        return -1;
    }
    u.base.routing_id = routing_id_;
    return 0;
}

int zmq::msg_t::reset_routing_id ()
{
    u.base.routing_id = 0;
    return 0;
}
//  Returns the group the message belongs to (empty string if none).
const char * zmq::msg_t::group ()
{
    return u.base.group;
}
//  Sets the group from a NUL-terminated string.
int zmq::msg_t::set_group (const char * group_)
{
    return set_group (group_, strlen (group_));
}
//  Sets the group from an explicit-length string.  Returns -1/EINVAL if
//  the name exceeds ZMQ_GROUP_MAX_LENGTH.  strncpy (not memcpy) is used
//  deliberately: it stops at group_'s terminator if that comes first.
int zmq::msg_t::set_group (const char * group_, size_t length_)
{
    if (length_> ZMQ_GROUP_MAX_LENGTH)
    {
        errno = EINVAL;
        return -1;
    }
    strncpy (u.base.group, group_, length_);
    u.base.group[length_] = '\0';
    return 0;
}
//  Returns the shared reference counter of the message.  Only long and
//  zero-copy messages carry one; any other type is a programming error.
zmq::atomic_counter_t *zmq::msg_t::refcnt()
{
    if (u.base.type == type_lmsg)
        return &u.lmsg.content->refcnt;
    if (u.base.type == type_zclmsg)
        return &u.zclmsg.content->refcnt;
    zmq_assert(false);
    return NULL;
}
| |
<|file_name|>IdentityProvider.java<|end_file_name|><|fim▁begin|>/*
* SonarQube
* Copyright (C) 2009-2016 SonarSource SA<|fim▁hole|> * mailto:contact AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.api.server.authentication;
import org.sonar.api.server.ServerSide;
/**
 * Entry-point for defining a new identity provider.
 * Exactly one of these two sub-interfaces must be implemented:
 * <ul>
 *   <li>{@link OAuth2IdentityProvider} for OAuth2 authentication</li>
 *   <li>{@link BaseIdentityProvider} for other kinds of authentication</li>
 * </ul>
 *
 * @since 5.4
 */
@ServerSide
public interface IdentityProvider {
  /**
   * Unique key of the provider, for example "github".
   * Must not be blank.
   */
  String getKey();
  /**
   * Name displayed in the login form.
   * Must not be blank.
   */
  String getName();
  /**
   * Display information for the login form (icon, background color, ...).
   */
  Display getDisplay();
  /**
   * Is the provider fully configured and enabled ? If {@code true}, then
   * the provider is available in the login form.
   */
  boolean isEnabled();
  /**
   * Can users sign-up (connect with their account for the first time) ? If {@code true},
   * then users can register and create their account in SonarQube, else only already
   * registered users can log in.
   */
  boolean allowsUsersToSignUp();
}
| |
<|file_name|>GoldStandard.java<|end_file_name|><|fim▁begin|>package de.uni_koeln.spinfo.textengineering.ir.eval;
import java.util.ArrayList;
import java.util.List;
import de.uni_koeln.spinfo.textengineering.ir.basic.Work;
import de.uni_koeln.spinfo.textengineering.ir.boole.PositionalIndex;
import de.uni_koeln.spinfo.textengineering.ir.preprocess.Preprocessor;
/*
* Erstellung eines Dummy-Goldstandards auf Grundlage unseres Shakespeare-Korpus
*/
public class GoldStandard {
public static List<Integer> create(PositionalIndex index, String query) {
List<Integer> result = new ArrayList<Integer>();
Preprocessor p = new Preprocessor();
List<String> q = p.tokenize(query);
int docId = 0;
for (Work d : index.getWorks()) {
/*
* Für unsere Experimente mit P, R und F betrachten wir ein Dokument immer dann als relevant, wenn ein Term
* der Anfrage im Titel des Dokuments enthalten ist:
*/
if (containsAny(d.getTitle(), q)) {
result.add(docId);<|fim▁hole|> }
docId++;
}
return result;
}
private static boolean containsAny(String title, List<String> query) {
for (String q : query) {
/* Wir geben true zurück wenn ein Element der Anfrage im Titel enthalten ist: */
if (title.toLowerCase().contains(q.toLowerCase())) {
return true;
}
}
return false;
}
}<|fim▁end|>
| |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Views for the verification flow
"""
import json
import logging
import decimal
from mitxmako.shortcuts import render_to_response
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseRedirect
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.views.generic.base import View
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.utils.http import urlencode
from django.contrib.auth.decorators import login_required
from course_modes.models import CourseMode
from student.models import CourseEnrollment
from student.views import course_from_id
from shoppingcart.models import Order, CertificateItem
from shoppingcart.processors.CyberSource import (
get_signed_purchase_params, get_purchase_endpoint
)
from verify_student.models import SoftwareSecurePhotoVerification
import ssencrypt
log = logging.getLogger(__name__)
class VerifyView(View):
    """
    First step of the verified-certificate flow: photo capture and price
    confirmation before the user is sent on to payment.
    """

    @method_decorator(login_required)
    def get(self, request, course_id):
        """
        Displays the main verification view, which contains three separate steps:
        - Taking the standard face photo
        - Taking the id photo
        - Confirming that the photos and payment price are correct
          before proceeding to payment
        """
        upgrade = request.GET.get('upgrade', False)

        # If the user has already been verified within the given time period,
        # redirect straight to the payment -- no need to verify again.
        if SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user):
            return redirect(
                reverse('verify_student_verified',
                        kwargs={'course_id': course_id}) + "?upgrade={}".format(upgrade)
            )
        elif CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == 'verified':
            # Already enrolled in the verified track; nothing to do here.
            return redirect(reverse('dashboard'))
        else:
            # If they haven't completed a verification attempt, we have to
            # restart with a new one. We can't reuse an older one because we
            # won't be able to show them their encrypted photo_id -- it's easier
            # bookkeeping-wise just to start over.
            progress_state = "start"

        verify_mode = CourseMode.mode_for_course(course_id, "verified")
        # if the course doesn't have a verified mode, we want to kick them
        # from the flow
        if not verify_mode:
            return redirect(reverse('dashboard'))
        if course_id in request.session.get("donation_for_course", {}):
            # Contribution amount previously chosen in this session.
            chosen_price = request.session["donation_for_course"][course_id]
        else:
            chosen_price = verify_mode.min_price

        course = course_from_id(course_id)
        context = {
            "progress_state": progress_state,
            "user_full_name": request.user.profile.name,
            "course_id": course_id,
            "course_name": course.display_name_with_default,
            "course_org": course.display_org_with_default,
            "course_num": course.display_number_with_default,
            "purchase_endpoint": get_purchase_endpoint(),
            "suggested_prices": [
                decimal.Decimal(price)
                for price in verify_mode.suggested_prices.split(",")
            ],
            "currency": verify_mode.currency.upper(),
            "chosen_price": chosen_price,
            "min_price": verify_mode.min_price,
            "upgrade": upgrade,
        }

        return render_to_response('verify_student/photo_verification.html', context)
class VerifiedView(View):
    """
    View that gets shown once the user has already gone through the
    verification flow
    """

    @method_decorator(login_required)
    def get(self, request, course_id):
        """
        Display the "you are verified" page with the chosen contribution
        price, or redirect to the dashboard when there is nothing to pay.
        """
        upgrade = request.GET.get('upgrade', False)
        if CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == 'verified':
            # Already enrolled in the verified track; nothing left to pay for.
            return redirect(reverse('dashboard'))
        verify_mode = CourseMode.mode_for_course(course_id, "verified")
        if not verify_mode:
            # Mirror VerifyView: a course without a verified mode kicks the
            # user out of the flow instead of crashing below.
            return redirect(reverse('dashboard'))
        if course_id in request.session.get("donation_for_course", {}):
            chosen_price = request.session["donation_for_course"][course_id]
        else:
            # BUG FIX: this used to call ``verify_mode.min_price.format("{:g}")``;
            # numbers have no ``format`` method, so it raised AttributeError.
            # The intended general-format rendering is applied here instead.
            chosen_price = "{:g}".format(verify_mode.min_price)

        course = course_from_id(course_id)
        context = {
            "course_id": course_id,
            "course_name": course.display_name_with_default,
            "course_org": course.display_org_with_default,
            "course_num": course.display_number_with_default,
            "purchase_endpoint": get_purchase_endpoint(),
            "currency": verify_mode.currency.upper(),
            "chosen_price": chosen_price,
            "upgrade": upgrade,
        }
        return render_to_response('verify_student/verified.html', context)
@login_required
def create_order(request):
    """
    Submit PhotoVerification and create a new Order for this verified cert
    """
    # Only start a fresh photo-verification attempt if the user has no
    # valid or pending one already.
    if not SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user):
        attempt = SoftwareSecurePhotoVerification(user=request.user)
        # Images arrive as data URLs ("data:...;base64,<payload>"); keep only
        # the base64 payload after the first comma.
        b64_face_image = request.POST['face_image'].split(",")[1]
        b64_photo_id_image = request.POST['photo_id_image'].split(",")[1]

        attempt.upload_face_image(b64_face_image.decode('base64'))
        attempt.upload_photo_id_image(b64_photo_id_image.decode('base64'))
        attempt.mark_ready()

        attempt.save()

    course_id = request.POST['course_id']
    donation_for_course = request.session.get('donation_for_course', {})
    current_donation = donation_for_course.get(course_id, decimal.Decimal(0))
    contribution = request.POST.get("contribution", donation_for_course.get(course_id, 0))
    try:
        # Normalize to two decimal places, truncating any extra precision.
        amount = decimal.Decimal(contribution).quantize(decimal.Decimal('.01'), rounding=decimal.ROUND_DOWN)
    except decimal.InvalidOperation:
        return HttpResponseBadRequest(_("Selected price is not valid number."))

    if amount != current_donation:
        # Remember the newly chosen contribution for this course.
        donation_for_course[course_id] = amount
        request.session['donation_for_course'] = donation_for_course

    verified_mode = CourseMode.modes_for_course_dict(course_id).get('verified', None)

    # make sure this course has a verified mode
    if not verified_mode:
        return HttpResponseBadRequest(_("This course doesn't support verified certificates"))

    if amount < verified_mode.min_price:
        return HttpResponseBadRequest(_("No selected price or selected price is below minimum."))

    # I know, we should check this is valid. All kinds of stuff missing here
    cart = Order.get_cart_for_user(request.user)
    cart.clear()
    CertificateItem.add_to_order(cart, course_id, amount, 'verified')

    # Signed CyberSource parameters the client will POST to the payment
    # endpoint.
    params = get_signed_purchase_params(cart)

    return HttpResponse(json.dumps(params), content_type="text/json")
@require_POST
@csrf_exempt  # SS does its own message signing, and their API won't have a cookie value
def results_callback(request):
    """
    Software Secure will call this callback to tell us whether a user is
    verified to be who they said they are.
    """
    body = request.body

    try:
        body_dict = json.loads(body)
    except ValueError:
        log.exception("Invalid JSON received from Software Secure:\n\n{}\n".format(body))
        return HttpResponseBadRequest("Invalid JSON. Received:\n\n{}".format(body))

    if not isinstance(body_dict, dict):
        log.error("Reply from Software Secure is not a dict:\n\n{}\n".format(body))
        return HttpResponseBadRequest("JSON should be dict. Received:\n\n{}".format(body))

    headers = {
        "Authorization": request.META.get("HTTP_AUTHORIZATION", ""),
        "Date": request.META.get("HTTP_DATE", "")
    }
    # NOTE(review): sig_valid is computed but deliberately not enforced --
    # see the comments below about a signature disagreement with the vendor.
    sig_valid = ssencrypt.has_valid_signature(
        "POST",
        headers,
        body_dict,
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"],
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"]
    )

    _response, access_key_and_sig = headers["Authorization"].split(" ")
    access_key = access_key_and_sig.split(":")[0]

    # This is what we should be doing...
    #if not sig_valid:
    #    return HttpResponseBadRequest("Signature is invalid")

    # This is what we're doing until we can figure out why we disagree on sigs
    if access_key != settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]:
        return HttpResponseBadRequest("Access key invalid")

    receipt_id = body_dict.get("EdX-ID")
    result = body_dict.get("Result")
    reason = body_dict.get("Reason", "")
    error_code = body_dict.get("MessageType", "")

    try:
        attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=receipt_id)
    except SoftwareSecurePhotoVerification.DoesNotExist:
        log.error("Software Secure posted back for receipt_id {}, but not found".format(receipt_id))
        return HttpResponseBadRequest("edX ID {} not found".format(receipt_id))

    # Map the vendor's verdict onto our verification state machine.
    if result == "PASS":
        log.debug("Approving verification for {}".format(receipt_id))
        attempt.approve()
    elif result == "FAIL":
        log.debug("Denying verification for {}".format(receipt_id))
        attempt.deny(json.dumps(reason), error_code=error_code)
    elif result == "SYSTEM FAIL":
        # Vendor-side failure: keep the attempt alive for a retry.
        log.debug("System failure for {} -- resetting to must_retry".format(receipt_id))
        attempt.system_error(json.dumps(reason), error_code=error_code)
        log.error("Software Secure callback attempt for %s failed: %s", receipt_id, reason)
    else:
        log.error("Software Secure returned unknown result {}".format(result))
        return HttpResponseBadRequest(
            "Result {} not understood. Known results: PASS, FAIL, SYSTEM FAIL".format(result)
        )

    return HttpResponse("OK!")
@login_required
def show_requirements(request, course_id):
    """
    Show the requirements necessary for the verification flow.

    Users already enrolled in the verified track are sent back to the
    dashboard; everyone else gets the requirements page.
    """
    # (A stray editor placeholder token had been spliced into this check,
    # breaking the syntax; the redirect is restored onto its own line.)
    if CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == 'verified':
        return redirect(reverse('dashboard'))

    upgrade = request.GET.get('upgrade', False)
    course = course_from_id(course_id)
    context = {
        "course_id": course_id,
        "course_name": course.display_name_with_default,
        "course_org": course.display_org_with_default,
        "course_num": course.display_number_with_default,
        "is_not_active": not request.user.is_active,
        "upgrade": upgrade,
    }
    return render_to_response("verify_student/show_requirements.html", context)
class ReverifyView(View):
    """
    The main reverification view. Under similar constraints as the main verification view.
    Has to perform these functions:
        - take new face photo
        - take new id photo
        - submit photos to photo verification service

    Does not need to be attached to a particular course.
    Does not need to worry about pricing
    """

    @method_decorator(login_required)
    def get(self, request):
        """
        display this view
        """
        context = {
            "user_full_name": request.user.profile.name,
            "error": False,
        }
        return render_to_response("verify_student/photo_reverification.html", context)

    @method_decorator(login_required)
    def post(self, request):
        """
        submits the reverification to SoftwareSecure
        """
        try:
            attempt = SoftwareSecurePhotoVerification(user=request.user)
            # Images arrive as data URLs; keep only the base64 payload.
            b64_face_image = request.POST['face_image'].split(",")[1]
            b64_photo_id_image = request.POST['photo_id_image'].split(",")[1]

            attempt.upload_face_image(b64_face_image.decode('base64'))
            attempt.upload_photo_id_image(b64_photo_id_image.decode('base64'))
            attempt.mark_ready()

            # save this attempt
            attempt.save()
            # then submit it across
            attempt.submit()
            return HttpResponseRedirect(reverse('verify_student_reverification_confirmation'))
        except Exception:
            # On any failure, re-render the capture page with an error flag
            # so the user can retry.
            log.exception(
                "Could not submit verification attempt for user {}".format(request.user.id)
            )
            context = {
                "user_full_name": request.user.profile.name,
                "error": True,
            }
            return render_to_response("verify_student/photo_reverification.html", context)
@login_required
def reverification_submission_confirmation(_request):
    """
    Shows the user a confirmation page if the submission to SoftwareSecure was successful
    """
    # The request is unused beyond authentication, hence the underscore name.
    return render_to_response("verify_student/reverification_confirmation.html")
| |
<|file_name|>album.rs<|end_file_name|><|fim▁begin|>use postgres;
use uuid::Uuid;
use std::fmt;
use chrono::{NaiveDateTime, Utc};
use apple_music;
use spotify;
use error::Error;
use super::{conn, Model};
use model::provider::Provider;
use model::state::State;
use model::enclosure::Enclosure;
use model::track::Track;
use model::artist::Artist;
// Column list shared by all Album queries.  The order here must stay in
// sync with the positional `row.get(i)` calls in `row_to_item`.
static PROPS: [&'static str; 14] = ["id",
                                    "provider",
                                    "identifier",
                                    "owner_id",
                                    "owner_name",
                                    "url",
                                    "title",
                                    "description",
                                    "thumbnail_url",
                                    "artwork_url",
                                    "published_at",
                                    "created_at",
                                    "updated_at",
                                    "state"]; 
/// An album aggregated from an external music provider, persisted in
/// the `albums` table.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Album {
    pub id: Uuid,
    /// Which external service this record came from.
    pub provider: Provider,
    /// The provider's own identifier for the album.
    pub identifier: String,
    pub owner_id: Option<String>,
    pub owner_name: Option<String>,
    pub url: String,
    pub title: String,
    pub description: Option<String>,
    pub thumbnail_url: Option<String>,
    pub artwork_url: Option<String>,
    pub published_at: NaiveDateTime,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
    pub state: State,
    // The two fields below are not columns of `albums`; they are filled
    // in separately by `set_relations`.
    pub tracks: Vec<Track>,
    pub artists: Option<Vec<Artist>>,
}
impl PartialEq for Album {
    /// Two albums are the same record when they come from the same
    /// provider and share that provider's identifier.
    fn eq(&self, p: &Album) -> bool {
        self.provider == p.provider && self.identifier == p.identifier
    }
}
impl fmt::Display for Album {
    /// Renders as `"<provider>:<identifier>"`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}:{}", self.provider, self.identifier)
    }
}
impl<'a> Model<'a> for Album {
fn table_name() -> String { "albums".to_string() }
fn props_str(prefix: &str) -> String {
PROPS
.iter()
.map(|&p| format!("{}{}", prefix, p))
.collect::<Vec<String>>().join(",")<|fim▁hole|> fn row_to_item(row: postgres::rows::Row) -> Album {
Album {
id: row.get(0),
provider: Provider::new(row.get(1)),
identifier: row.get(2),
owner_id: row.get(3),
owner_name: row.get(4),
url: row.get(5),
title: row.get(6),
description: row.get(7),
thumbnail_url: row.get(8),
artwork_url: row.get(9),
published_at: row.get(10),
created_at: row.get(11),
updated_at: row.get(12),
state: State::new(row.get(13)),
tracks: vec![],
artists: None,
}
}
    /// Inserts a minimal row (provider, identifier, url, title) and
    /// returns a copy of `self` with the database-generated id filled in.
    fn create(&self) -> Result<Album, Error> {
        let conn = conn()?;
        let stmt = conn.prepare("INSERT INTO albums (provider, identifier, url, title)
                                 VALUES ($1, $2, $3, $4) RETURNING id")?;
        let rows = stmt.query(&[&self.provider.to_string(), &self.identifier, &self.url, &self.title])?;
        let mut album = self.clone();
        for row in rows.iter() {
            album.id = row.get(0);
        }
        Ok(album)
    }
    /// Writes every column of this album back to the database, bumping
    /// `updated_at` to the current time as a side effect.
    fn save(&mut self) -> Result<(), Error> {
        self.updated_at = Utc::now().naive_utc();
        let conn = conn()?;
        let stmt = conn.prepare("UPDATE albums SET
                                 provider      = $2,
                                 identifier    = $3,
                                 owner_id      = $4,
                                 owner_name    = $5,
                                 url           = $6,
                                 title         = $7,
                                 description   = $8,
                                 thumbnail_url = $9,
                                 artwork_url   = $10,
                                 published_at  = $11,
                                 created_at    = $12,
                                 updated_at    = $13,
                                 state         = $14
                                 WHERE id = $1")?;
        let result = stmt.query(&[&self.id,
                                  &self.provider.to_string(),
                                  &self.identifier,
                                  &self.owner_id,
                                  &self.owner_name,
                                  &self.url,
                                  &self.title,
                                  &self.description,
                                  &self.thumbnail_url,
                                  &self.artwork_url,
                                  &self.published_at,
                                  &self.created_at,
                                  &self.updated_at,
                                  &self.state.to_string(),
        ]);
        // Collapse any query failure into the crate-level error type.
        match result {
            Ok(_) => Ok(()),
            Err(_) => Err(Error::Unexpected),
        }
    }
    /// Batch-loads the tracks and artists for all given albums (one query
    /// per relation, not per album) and attaches them in place.
    fn set_relations(albums: &mut Vec<Album>) -> Result<(), Error> {
        let ids: Vec<Uuid> = albums.iter().map(|i| i.id).collect();
        let tracks_of_album = Track::find_by_albums(&ids)?;
        let artists_of_album = Artist::find_by_albums(&ids)?;
        for album in albums {
            if let Some(ref mut tracks) = tracks_of_album.get(&album.id) {
                album.tracks = tracks.clone()
            }
            if let Some(ref mut artists) = artists_of_album.get(&album.id) {
                album.artists = Some(artists.clone())
            }
        }
        Ok(())
    }
impl<'a> Enclosure<'a> for Album {
    /// Builds a fresh in-memory `Album` (not yet persisted) with a new
    /// UUID, `Alive` state, empty strings, and all timestamps set to now.
    fn new(provider: Provider, identifier: String) -> Album {
        Album {
            id: Uuid::new_v4(),
            provider: provider,
            identifier: identifier,
            owner_id: None,
            owner_name: None,
            url: "".to_string(),
            title: "".to_string(),
            description: None,
            thumbnail_url: None,
            artwork_url: None,
            published_at: Utc::now().naive_utc(),
            created_at: Utc::now().naive_utc(),
            updated_at: Utc::now().naive_utc(),
            state: State::Alive,
            tracks: vec![],
            artists: None,
        }
    }
    /// Builder-style setter for the album URL.
    fn set_url(&mut self, url: String) -> &mut Album {
        self.url = url;
        self
    }
    /// Builder-style setter for the owning account's provider id.
    fn set_owner_id(&mut self, owner_id: Option<String>) -> &mut Album {
        self.owner_id = owner_id;
        self
    }
    /// Refreshes this album's properties from the remote provider API.
    /// On fetch failure the album is marked `Dead`, which is reported to
    /// the caller as `Error::NotFound`.
    fn fetch_props(&mut self) -> Result<(), Error> {
        match self.provider {
            Provider::AppleMusic => {
                // The storefront country is encoded in the album URL.
                let country = apple_music::country(&self.url);
                match apple_music::fetch_album(&self.identifier, &country) {
                    Ok(album) => self.update_with_am_album(&album),
                    Err(_) => self.disable(),
                }
            }
            Provider::Spotify => match spotify::fetch_album(&self.identifier) {
                Ok(album) => self.update_with_sp_album(&album),
                Err(_) => self.disable(),
            },
            // Other providers are left untouched.
            _ => self,
        };
        match self.state {
            State::Alive => Ok(()),
            State::Dead => Err(Error::NotFound),
        }
    }
    /// Loads all albums linked to a feed entry via `album_entries`,
    /// newest first.  Panics on database errors (`unwrap`).
    fn find_by_entry_id(entry_id: Uuid) -> Vec<Album> {
        let conn = conn().unwrap();
        let stmt = conn.prepare(
            &format!("SELECT {} FROM albums p LEFT JOIN album_entries pe
                        ON p.id = pe.album_id
                        WHERE pe.entry_id = $1
                        ORDER BY p.published_at DESC",
                     Album::props_str("p."))).unwrap();
        let rows = stmt.query(&[&entry_id]).unwrap();
        Album::rows_to_items(rows)
    }
}
impl Album {
    /// Loads every album, newest first.  Panics on database errors.
    pub fn find_all() -> Vec<Album> {
        let conn = conn().unwrap();
        let stmt = conn.prepare(
            &format!("SELECT {} FROM albums ORDER BY albums.published_at DESC",
                     Album::props_str(""))).unwrap();
        let rows = stmt.query(&[]).unwrap();
        Album::rows_to_items(rows)
    }
    /// Links an existing track to this album (join table only).
    fn add_track(&mut self, track: &Track) -> Result<(), Error> {
        let conn = conn()?;
        let stmt = conn.prepare("INSERT INTO album_tracks (track_id, album_id)
                                 VALUES ($1, $2)")?;
        stmt.query(&[&track.id, &self.id])?;
        Ok(())
    }
    /// Links an artist to this album and mirrors the link in memory.
    fn add_artist(&mut self, artist: &Artist) -> Result<(), Error> {
        let conn = conn()?;
        let stmt = conn.prepare("INSERT INTO album_artists (album_id, artist_id) VALUES ($1, $2)")?;
        stmt.query(&[&self.id, &artist.id])?;
        match self.artists {
            Some(ref mut artists) => artists.push(artist.clone()),
            None => self.artists = Some(vec![artist.clone()]),
        }
        Ok(())
    }
    /// Loads the albums linked to an artist, newest first.  Panics on
    /// database errors.
    pub fn find_by_artist(artist_id: Uuid) -> Vec<Album> {
        let conn = conn().unwrap();
        let stmt = conn.prepare(
            &format!("SELECT {} FROM albums
                      LEFT OUTER JOIN album_artists ON album_artists.album_id = albums.id
                      WHERE album_artists.artist_id = $1 ORDER BY albums.created_at DESC",
                     Album::props_str("albums."))).unwrap();
        let rows = stmt.query(&[&artist_id]).unwrap();
        Album::rows_to_items(rows)
    }
    /// Loads all albums of a given provider, newest first.  Panics on
    /// database errors.
    pub fn find_by_provider(provider: &Provider) -> Vec<Album> {
        let conn = conn().unwrap();
        let stmt = conn.prepare(
            &format!("SELECT {} FROM albums WHERE albums.provider = $1
                      ORDER BY albums.created_at DESC",
                     Album::props_str(""))).unwrap();
        let rows = stmt.query(&[&(*provider).to_string()]).unwrap();
        Album::rows_to_items(rows)
    }
pub fn from_sp_album(album: &spotify::Album) -> Album {
Album::find_or_create(Provider::Spotify, (*album).id.to_string())
.unwrap()
.update_with_sp_album(album)
.clone()
}
pub fn from_am_album(album: &apple_music::Album) -> Album {
Album::find_or_create(Provider::AppleMusic, (*album).id.to_string())
.unwrap()
.update_with_am_album(album)
.clone()
}
    /// Upserts each track, links it to this album, and stores the list
    /// (with refreshed ids) on `self.tracks`.
    fn add_tracks(&mut self, tracks: Vec<Track>) {
        self.tracks = tracks.iter().map(|t| {
            let mut t = t.clone();
            if let Ok(new_track) = Track::find_or_create(t.provider,
                                                         t.identifier.to_string()) {
                // Adopt the canonical row id before saving and linking.
                t.id = new_track.id;
                let _ = t.save();
                let _ = self.add_track(&t);
            };
            t
        }).collect::<Vec<_>>();
    }
    /// Upserts each artist, links it to this album, and stores the list
    /// (with refreshed ids) on `self.artists`.
    fn add_artists(&mut self, artists: Vec<Artist>) {
        self.artists = Some(artists.iter().map(|a| {
            let mut a = a.clone();
            if let Ok(new_artist) = Artist::find_or_create(a.provider,
                                                           a.identifier.to_string()) {
                a.id = new_artist.id;
                let _ = a.save();
                let _ = self.add_artist(&a);
            };
            a
        }).collect::<Vec<_>>());
    }
    /// Overwrites this album's properties from a Spotify payload, then
    /// fetches and links the full artist and track records.
    pub fn update_with_sp_album(&mut self, album: &spotify::Album) -> &mut Album {
        self.provider   = Provider::Spotify;
        self.identifier = album.id.to_string();
        // The first listed artist is treated as the owner.
        if album.artists.len() > 0 {
            self.owner_id   = Some(album.artists[0].id.clone());
            self.owner_name = Some(album.artists[0].name.clone());
        }
        self.url          = album.uri.clone();
        self.title        = album.name.clone();
        self.description  = None;
        self.state        = State::Alive;
        self.published_at = Utc::now().naive_utc();
        // images[0] is the largest rendition; use it for both and let a
        // second, smaller image override the thumbnail when present.
        if album.images.len() > 0 {
            self.artwork_url   = Some(album.images[0].url.clone());
            self.thumbnail_url = Some(album.images[0].url.clone());
        }
        if album.images.len() > 1 {
            self.thumbnail_url = Some(album.images[1].url.clone());
        }
        // Fetch the full artist objects and link them to this album.
        let artist_ids = album.artists.iter().map(|a| a.id.clone()).collect::<Vec<String>>();
        let sp_artists = spotify::fetch_artists(artist_ids).unwrap_or_default();
        let artists = sp_artists.iter().map(|ref a| Artist::from_sp_artist(a))
            .collect::<Vec<_>>();
        self.add_artists(artists);
        // Fetch the full track objects (skipping items without an id),
        // stamp them with this album's data, and link them.
        let track_ids = album.tracks.clone()
            .map(|t| t.items).unwrap_or(vec![]).iter()
            .filter(|ref t| t.id.is_some())
            .map(|t| t.clone().id.unwrap())
            .collect();
        let sp_tracks = spotify::fetch_tracks(track_ids).unwrap_or(vec![]);
        let tracks = sp_tracks.iter()
            .map(|ref t| Track::from_sp_track(t))
            .filter(|ref t| t.is_ok())
            .map(|t| t.unwrap().clone())
            .map(|ref mut t| t.update_with_sp_album(&album).clone())
            .collect::<Vec<_>>();
        self.add_tracks(tracks);
        self
    }
    /// Refresh this record from an Apple Music album payload: copy the core
    /// fields from the first related artist, then fetch and link the full
    /// artist and song objects for the album's storefront country.
    pub fn update_with_am_album(&mut self, album: &apple_music::Album) -> &mut Album {
        let album_artists = album.clone().relationships.map(|r| {
            r.artists.data.clone()
        });
        // Core fields are only updated when at least one related artist exists.
        if let Some(album_artist) = album_artists.clone().and_then(|a| a.first().map(|a| a.clone())) {
            let artist_name = album_artist.attributes.name.clone();
            self.provider = Provider::AppleMusic;
            self.identifier = album.id.to_string();
            self.owner_id = Some(album_artist.id.to_string());
            self.owner_name = Some(artist_name.to_string());
            self.url = album.attributes.url.clone();
            self.title = album.attributes.name.clone();
            self.description = album.attributes.editorial_notes.clone().and_then(|n| n.short.clone());
            self.thumbnail_url = Some(album.attributes.artwork.get_thumbnail_url());
            self.artwork_url = Some(album.attributes.artwork.get_artwork_url());
            self.state = State::Alive;
        }
        // Storefront country is derived from the album URL; presumably needed
        // because Apple Music catalog lookups are region-scoped.
        let country = apple_music::country(&self.url);
        if let Some(album_artists) = album_artists.clone() {
            let artist_ids = album_artists.iter().map(|a| a.id.clone()).collect::<Vec<String>>();
            let artists = apple_music::fetch_artists(&country, artist_ids).unwrap_or(vec![]);
            self.add_artists(artists.iter().map(|a| Artist::from_am_artist(a)).collect());
        }
        let album_tracks = album.clone().relationships.map(|r| {
            r.tracks.data.clone()
        }).unwrap_or(vec![]);
        // Music videos are deliberately dropped; only songs are linked.
        let songs = album_tracks.iter().map(|track| match *track {
            apple_music::Track::Song(ref song) => Some(song),
            apple_music::Track::MusicVideo(_) => None,
        }).filter(|song| song.is_some())
            .map(|song| song.unwrap())
            .collect::<Vec<_>>();
        let song_ids = songs.iter().map(|song| song.id.clone()).collect::<Vec<String>>();
        let songs = apple_music::fetch_songs(&country, song_ids).unwrap_or(vec![]);
        self.add_tracks(songs.iter().map(|song| Track::from_am_song(song)).collect());
        self
    }
    /// Mark the album as no longer available. The row is kept; only the
    /// state flag changes (contrast with `delete`).
    pub fn disable(&mut self) -> &mut Album {
        self.state = State::Dead;
        self
    }
pub fn delete(&self) -> Result<(), Error> {
let conn = conn()?;
let stmt = conn.prepare("DELETE FROM albums WHERE id=$1")?;
let result = stmt.query(&[&self.id]);
match result {
Ok(_) => Ok(()),
Err(_) => Err(Error::Unexpected)
}
}
}
#[cfg(test)]
mod test {
    use model::enclosure::Enclosure;
    use model::Model;
    use super::Album;
    use Provider;
    // NOTE(review): these tests hit the real database via find_or_create/save;
    // they assume a configured test DB is reachable.
    #[test]
    fn test_new() {
        // A freshly constructed Album carries the provider/identifier verbatim.
        let album = Album::new(Provider::YouTube,
                               "PLy8LZ8FM-o0ViuGAF68RAaXkQ8V-3dbTX".to_string());
        assert_eq!(album.provider, Provider::YouTube);
        assert_eq!(&album.identifier, "PLy8LZ8FM-o0ViuGAF68RAaXkQ8V-3dbTX")
    }
    #[test]
    fn test_find_of_create() {
        // find_or_create is expected to succeed whether or not the row exists.
        let identifier = "PLy8LZ8FM-o0ViuGAF68RAaXkQ8V-3dbTX".to_string();
        let result = Album::find_or_create(Provider::YouTube, identifier);
        assert!(result.is_ok());
    }
    #[test]
    fn test_delete() {
        // Create a throwaway row, then verify it can be deleted.
        let identifier = "test_delete".to_string();
        let album = Album::find_or_create(Provider::YouTube, identifier).unwrap();
        let result = album.delete();
        assert!(result.is_ok());
    }
    #[test]
    fn test_save() {
        // Round-trip: save a title, re-fetch the row, and check it persisted.
        let id = "test_save";
        let mut album = Album::find_or_create(Provider::YouTube, id.to_string()).unwrap();
        album.title = "title".to_string();
        let result = album.save();
        assert!(result.is_ok());
        let album = Album::find_or_create(Provider::YouTube, id.to_string()).unwrap();
        assert_eq!(&album.title, "title");
    }
}<|fim▁end|>
|
}
|
<|file_name|>attributes.go<|end_file_name|><|fim▁begin|>/*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package attributes
import (
"bytes"
"crypto/x509"
"encoding/asn1"
"errors"
"fmt"
"strconv"
"strings"
pb "github.com/fabric_sdk_golang/core/crypto/attributes/proto"
"github.com/fabric_sdk_golang/core/crypto/primitives"
"github.com/golang/protobuf/proto"
)
var (
// TCertEncAttributesBase is the base ASN1 object identifier for attributes.
// When generating an extension to include the attribute an index will be
// appended to this Object Identifier.
TCertEncAttributesBase = asn1.ObjectIdentifier{1, 2, 3, 4, 5, 6}
// TCertAttributesHeaders is the ASN1 object identifier of attributes header.
TCertAttributesHeaders = asn1.ObjectIdentifier{1, 2, 3, 4, 5, 6, 9}
padding = []byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}
//headerPrefix is the prefix used in the header exteion of the certificate.
headerPrefix = "00HEAD"
//HeaderAttributeName is the name used to derivate the K used to encrypt/decrypt the header.
HeaderAttributeName = "attributeHeader"
)
//ParseAttributesHeader parses a header of the form
//"00HEADname1->pos1#name2->pos2#..." and returns a map from attribute name
//to position. It fails if the prefix is missing or a position is not an
//integer; tokens without exactly one "->" separator are silently skipped.
func ParseAttributesHeader(header string) (map[string]int, error) {
	if !strings.HasPrefix(header, headerPrefix) {
		return nil, errors.New("Invalid header")
	}
	// Prefix is confirmed to be at position 0, so trimming it is equivalent
	// to the single-occurrence replace used previously.
	body := strings.TrimPrefix(header, headerPrefix)
	result := make(map[string]int)
	for _, token := range strings.Split(body, "#") {
		pair := strings.Split(token, "->")
		if len(pair) != 2 {
			continue
		}
		value, err := strconv.Atoi(pair[1])
		if err != nil {
			return nil, err
		}
		result[pair[0]] = value
	}
	return result, nil
}
//ReadAttributeHeader reads the attributes header extension from the TCert.
//It first tries to parse the raw extension as plaintext; if that fails it
//assumes the header is encrypted, decrypts it with headerKey and parses
//again. Returns the name->position map and whether the header was encrypted.
func ReadAttributeHeader(tcert *x509.Certificate, headerKey []byte) (map[string]int, bool, error) {
	var err error
	var headerRaw []byte
	encrypted := false
	// The header lives in the critical extension identified by TCertAttributesHeaders.
	if headerRaw, err = primitives.GetCriticalExtension(tcert, TCertAttributesHeaders); err != nil {
		return nil, encrypted, err
	}
	headerStr := string(headerRaw)
	var header map[string]int
	header, err = ParseAttributesHeader(headerStr)
	if err != nil {
		// Plaintext parse failed: treat the extension as encrypted.
		if headerKey == nil {
			return nil, false, errors.New("Is not possible read an attribute encrypted without the headerKey")
		}
		headerRaw, err = DecryptAttributeValue(headerKey, headerRaw)
		if err != nil {
			return nil, encrypted, errors.New("error decrypting header value '" + err.Error() + "''")
		}
		headerStr = string(headerRaw)
		header, err = ParseAttributesHeader(headerStr)
		if err != nil {
			return nil, encrypted, err
		}
		encrypted = true
	}
	return header, encrypted, nil
}
//ReadTCertAttributeByPosition reads the attribute stored at the 1-based
//"position" of the tcert. Each position maps to a critical extension whose
//OID is the attributes base with 9+position as its final arc.
func ReadTCertAttributeByPosition(tcert *x509.Certificate, position int) ([]byte, error) {
	if position <= 0 {
		return nil, fmt.Errorf("Invalid attribute position. Received [%v]", position)
	}
	oid := asn1.ObjectIdentifier{1, 2, 3, 4, 5, 6, 9 + position}
	value, err := primitives.GetCriticalExtension(tcert, oid)
	if err != nil {
		return nil, err
	}
	return value, nil
}
//ReadTCertAttribute reads the attribute named "attributeName" from the tcert
//and returns its raw (possibly still encrypted) value plus a boolean telling
//the caller whether the value is encrypted.
func ReadTCertAttribute(tcert *x509.Certificate, attributeName string, headerKey []byte) ([]byte, bool, error) {
	header, encrypted, err := ReadAttributeHeader(tcert, headerKey)
	if err != nil {
		return nil, false, err
	}
	// A zero position means the attribute is absent (valid positions are >= 1).
	position := header[attributeName]
	if position == 0 {
		return nil, encrypted, errors.New("Failed attribute '" + attributeName + "' doesn't exists in the TCert.")
	}
	value, err := ReadTCertAttributeByPosition(tcert, position)
	if err != nil {
		return nil, encrypted, err
	}
	return value, encrypted, nil
}
//EncryptAttributeValue encrypts "attributeValue" using "attributeKey".
//The fixed padding block is appended first so DecryptAttributeValue can use
//it to verify that decryption used the right key.
func EncryptAttributeValue(attributeKey []byte, attributeValue []byte) ([]byte, error) {
	value := append(attributeValue, padding...)
	return primitives.CBCPKCS7Encrypt(attributeKey, value)
}
//getAttributeKey derives the per-attribute key from preK0 via a truncated
//HMAC keyed on the attribute name (32-byte output).
func getAttributeKey(preK0 []byte, attributeName string) []byte {
	return primitives.HMACTruncated(preK0, []byte(attributeName), 32)
}
<|fim▁hole|> attributeKey := getAttributeKey(preK0, attributeName)
return EncryptAttributeValue(attributeKey, attributeValue)
}
//DecryptAttributeValue decrypts "encryptedValue" using "attributeKey" and
//strips the fixed padding appended at encryption time. The padding doubles
//as an integrity check: if the trailing bytes do not match the expected pad,
//the key was wrong (or the data corrupt) and an error is returned.
func DecryptAttributeValue(attributeKey []byte, encryptedValue []byte) ([]byte, error) {
	value, err := primitives.CBCPKCS7Decrypt(attributeKey, encryptedValue)
	if err != nil {
		return nil, err
	}
	lenPadding := len(padding)
	lenValue := len(value)
	if lenValue < lenPadding {
		return nil, errors.New("Error invalid value. Decryption verification failed.")
	}
	lenWithoutPadding := lenValue - lenPadding
	// bytes.Equal is the idiomatic equality check (bytes.Compare is for ordering).
	if !bytes.Equal(padding[0:lenPadding], value[lenWithoutPadding:lenValue]) {
		return nil, errors.New("Error generating decryption key for value. Decryption verification failed.")
	}
	return value[0:lenWithoutPadding], nil
}
//getKAndValueForAttribute derives the key for attribute "attributeName",
//reads the attribute from the certificate and, if it is stored encrypted,
//decrypts it. Returns both the derived key and the plaintext value.
func getKAndValueForAttribute(attributeName string, preK0 []byte, cert *x509.Certificate) ([]byte, []byte, error) {
	// The header itself is protected with a key derived from the reserved
	// HeaderAttributeName.
	headerKey := getAttributeKey(preK0, HeaderAttributeName)
	value, encrypted, err := ReadTCertAttribute(cert, attributeName, headerKey)
	if err != nil {
		return nil, nil, err
	}
	attributeKey := getAttributeKey(preK0, attributeName)
	if encrypted {
		value, err = DecryptAttributeValue(attributeKey, value)
		if err != nil {
			return nil, nil, err
		}
	}
	return attributeKey, value, nil
}
//GetKForAttribute derives the key for attribute "attributeName".
//Thin wrapper over getKAndValueForAttribute that discards the value.
func GetKForAttribute(attributeName string, preK0 []byte, cert *x509.Certificate) ([]byte, error) {
	key, _, err := getKAndValueForAttribute(attributeName, preK0, cert)
	return key, err
}
//GetValueForAttribute returns the (decrypted) value of attribute
//"attributeName". Thin wrapper over getKAndValueForAttribute that discards
//the derived key.
func GetValueForAttribute(attributeName string, preK0 []byte, cert *x509.Certificate) ([]byte, error) {
	_, value, err := getKAndValueForAttribute(attributeName, preK0, cert)
	return value, err
}
// createAttributesHeaderEntry builds the metadata entry for the reserved
// header pseudo-attribute, carrying its derived key.
func createAttributesHeaderEntry(preK0 []byte) *pb.AttributesMetadataEntry {
	attKey := getAttributeKey(preK0, HeaderAttributeName)
	return &pb.AttributesMetadataEntry{AttributeName: HeaderAttributeName, AttributeKey: attKey}
}
// createAttributesMetadataEntry builds the metadata entry for a single
// attribute, carrying the key derived from preK0 for that attribute.
func createAttributesMetadataEntry(attributeName string, preK0 []byte) *pb.AttributesMetadataEntry {
	attKey := getAttributeKey(preK0, attributeName)
	return &pb.AttributesMetadataEntry{AttributeName: attributeName, AttributeKey: attKey}
}
//CreateAttributesMetadataObjectFromCert creates an AttributesMetadata object
//from the given metadata and attribute keys, always appending the reserved
//header entry last.
//NOTE(review): the cert parameter is currently unused in this body; it is
//presumably kept for API symmetry with CreateAttributesMetadataFromCert.
func CreateAttributesMetadataObjectFromCert(cert *x509.Certificate, metadata []byte, preK0 []byte, attributeKeys []string) *pb.AttributesMetadata {
	var entries []*pb.AttributesMetadataEntry
	for _, key := range attributeKeys {
		// Empty attribute names are skipped rather than producing empty entries.
		if len(key) == 0 {
			continue
		}
		entry := createAttributesMetadataEntry(key, preK0)
		entries = append(entries, entry)
	}
	headerEntry := createAttributesHeaderEntry(preK0)
	entries = append(entries, headerEntry)
	return &pb.AttributesMetadata{Metadata: metadata, Entries: entries}
}
//CreateAttributesMetadataFromCert builds the AttributesMetadata for "cert"
//and returns it serialized as protobuf bytes.
func CreateAttributesMetadataFromCert(cert *x509.Certificate, metadata []byte, preK0 []byte, attributeKeys []string) ([]byte, error) {
	attributesMetadata := CreateAttributesMetadataObjectFromCert(cert, metadata, preK0, attributeKeys)
	return proto.Marshal(attributesMetadata)
}
//CreateAttributesMetadata creates the serialized AttributesMetadata from a
//DER-encoded certificate ("raw") plus the original metadata.
func CreateAttributesMetadata(raw []byte, metadata []byte, preK0 []byte, attributeKeys []string) ([]byte, error) {
	cert, err := primitives.DERToX509Certificate(raw)
	if err != nil {
		return nil, err
	}
	return CreateAttributesMetadataFromCert(cert, metadata, preK0, attributeKeys)
}
//GetAttributesMetadata deserializes an AttributesMetadata protobuf from
//"metadata". On unmarshal failure the (partially filled) object is returned
//alongside the error, matching proto.Unmarshal semantics.
func GetAttributesMetadata(metadata []byte) (*pb.AttributesMetadata, error) {
	attributesMetadata := &pb.AttributesMetadata{}
	err := proto.Unmarshal(metadata, attributesMetadata)
	return attributesMetadata, err
}
//BuildAttributesHeader builds the attributes header
//("00HEADname->pos#...") from a map of attribute names to positions.
//Map iteration order is nondeterministic, but consumers parse the header
//back into a map (ParseAttributesHeader), so pair order does not matter.
//Returns an error if two attributes share the same position.
//Uses strings.Builder instead of repeated string concatenation, which was
//quadratic in the number of attributes.
func BuildAttributesHeader(attributesHeader map[string]int) ([]byte, error) {
	var body strings.Builder
	body.WriteString(headerPrefix)
	positions := make(map[int]bool, len(attributesHeader))
	for k, v := range attributesHeader {
		if positions[v] {
			return nil, errors.New("Duplicated position found in attributes header")
		}
		positions[v] = true
		body.WriteString(k)
		body.WriteString("->")
		body.WriteString(strconv.Itoa(v))
		body.WriteString("#")
	}
	return []byte(body.String()), nil
}<|fim▁end|>
|
//EncryptAttributeValuePK0 encrypts "attributeValue" using a key derived from preK0.
func EncryptAttributeValuePK0(preK0 []byte, attributeName string, attributeValue []byte) ([]byte, error) {
|
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
h "goshawkdb.io/tests/harness"
"log"
"time"
)
func main() {
setup := h.NewSetup()
config1, err := h.NewPathProvider("./v1.json", false)
if err != nil {
log.Fatal(err)
}
config2, err := h.NewPathProvider("./v2.json", false)
if err != nil {
log.Fatal(err)
}
rm1 := setup.NewRM("one", 10001, nil, config1)
rm2 := setup.NewRM("two", 10002, nil, config1)
rm3 := setup.NewRM("three", 10003, nil, config1)
rm4 := setup.NewRM("four", 10004, nil, config2)
prog := h.Program([]h.Instruction{
setup,
rm1.Start(),
rm2.Start(),
rm3.Start(),
setup.Sleep(5 * time.Second),
rm4.Start(),
setup.Sleep(15 * time.Second),
rm1.Terminate(),
rm2.Terminate(),
rm3.Terminate(),
rm4.Terminate(),
rm1.Wait(),<|fim▁hole|> rm4.Wait(),
})
log.Println(h.Run(setup, prog))
}<|fim▁end|>
|
rm2.Wait(),
rm3.Wait(),
|
<|file_name|>ajax.js<|end_file_name|><|fim▁begin|>// ajax mode: abort
// usage: $.ajax({ mode: "abort"[, port: "uniqueport"]});
// if mode:"abort" is used, the previous request on that port (port can be undefined) is aborted via XMLHttpRequest.abort()
var pendingRequests = {},
ajax;
// Use a prefilter if available (1.5+)
if ($.ajaxPrefilter) {
$.ajaxPrefilter(function (settings, _, xhr) {
var port = settings.port;
if (settings.mode === "abort") {
if (pendingRequests[port]) {
pendingRequests[port].abort();
}
pendingRequests[port] = xhr;
}
});
} else {<|fim▁hole|> ajax = $.ajax;
$.ajax = function (settings) {
var mode = ( "mode" in settings ? settings : $.ajaxSettings ).mode,
port = ( "port" in settings ? settings : $.ajaxSettings ).port;
if (mode === "abort") {
if (pendingRequests[port]) {
pendingRequests[port].abort();
}
pendingRequests[port] = ajax.apply(this, arguments);
return pendingRequests[port];
}
return ajax.apply(this, arguments);
};
}<|fim▁end|>
|
// Proxy ajax
|
<|file_name|>incremental.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use flow::{self, FlowFlags, Flow};
use style::computed_values::float;
use style::selector_parser::RestyleDamage;
use style::servo::restyle_damage::ServoRestyleDamage;
<|fim▁hole|>#[derive(Clone, Copy, PartialEq)]
/// Whether a re-layout pass may proceed incrementally or must recompute
/// layout unconditionally.
pub enum RelayoutMode {
    /// Re-lay out only what restyle damage requires.
    Incremental,
    /// Re-lay out regardless of recorded damage.
    Force
}
pub struct SpecialRestyleDamage: u8 {
#[doc = "If this flag is set, we need to reflow the entire document. This is more or less a \
temporary hack to deal with cases that we don't handle incrementally yet."]
const REFLOW_ENTIRE_DOCUMENT = 0x01;
}
}
pub trait LayoutDamageComputation {
fn compute_layout_damage(self) -> SpecialRestyleDamage;
fn reflow_entire_document(self);
}
impl<'a> LayoutDamageComputation for &'a mut Flow {
fn compute_layout_damage(self) -> SpecialRestyleDamage {
let mut special_damage = SpecialRestyleDamage::empty();
let is_absolutely_positioned = flow::base(self).flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
// In addition to damage, we use this phase to compute whether nodes affect CSS counters.
let mut has_counter_affecting_children = false;
{
let self_base = flow::mut_base(self);
// Take a snapshot of the parent damage before updating it with damage from children.
let parent_damage = self_base.restyle_damage;
for kid in self_base.children.iter_mut() {
let child_is_absolutely_positioned =
flow::base(kid).flags.contains(FlowFlags::IS_ABSOLUTELY_POSITIONED);
flow::mut_base(kid).restyle_damage.insert(
parent_damage.damage_for_child(is_absolutely_positioned,
child_is_absolutely_positioned));
{
let kid: &mut Flow = kid;
special_damage.insert(kid.compute_layout_damage());
}
self_base.restyle_damage
.insert(flow::base(kid).restyle_damage.damage_for_parent(
child_is_absolutely_positioned));
has_counter_affecting_children = has_counter_affecting_children ||
flow::base(kid).flags.intersects(FlowFlags::AFFECTS_COUNTERS |
FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN);
}
}
let self_base = flow::mut_base(self);
if self_base.flags.float_kind() != float::T::none &&
self_base.restyle_damage.intersects(ServoRestyleDamage::REFLOW) {
special_damage.insert(SpecialRestyleDamage::REFLOW_ENTIRE_DOCUMENT);
}
if has_counter_affecting_children {
self_base.flags.insert(FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN)
} else {
self_base.flags.remove(FlowFlags::HAS_COUNTER_AFFECTING_CHILDREN)
}
special_damage
}
fn reflow_entire_document(self) {
let self_base = flow::mut_base(self);
self_base.restyle_damage.insert(RestyleDamage::rebuild_and_reflow());
self_base.restyle_damage.remove(ServoRestyleDamage::RECONSTRUCT_FLOW);
for kid in self_base.children.iter_mut() {
kid.reflow_entire_document();
}
}
}<|fim▁end|>
|
/// Used in a flow traversal to indicate whether this re-layout should be incremental or not.
|
<|file_name|>StalkerRequestHandler.java<|end_file_name|><|fim▁begin|>package com.mvas.webproxy.portals;
import com.mvas.webproxy.DeviceConnectionInfo;
import com.mvas.webproxy.RequestData;
import com.mvas.webproxy.WebServer;
import com.mvas.webproxy.config.PortalConfiguration;
import org.apache.commons.io.IOUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class StalkerRequestHandler implements AbstractRequestHandler {
private static final Logger logger = LoggerFactory.getLogger(StalkerRequestHandler.class);
public static final String GET_PARAM_DEVICE_ID = "device_id";
public static final String GET_PARAM_DEVICE_ID2 = "device_id2";
public static final String GET_PARAM_SIGNATURE = "signature";
public static final String GET_PARAM_SESSION = "session";
public static final String CONFIG_PORTAL_URL = "portal";
public static final String GET_PARAM_MAC_ADDRESS = "mac";
public static final String ACTION_HANDSHAKE = "action=handshake";
Pattern cookiePattern = Pattern.compile(".*mac=(([0-9A-F]{2}[:-]){5}([0-9A-F]{2})).*");
Pattern handshakePattern = Pattern.compile(ACTION_HANDSHAKE);
public static final String HEADER_AUTHORIZATION = "Authorization";
    /** Mutable bag of HTTP header name/value pairs cached per MAC address. */
    public static final class HeadersList {
        public HashMap<String, String> getList() {
            return list;
        }
        private HashMap<String, String> list = new HashMap<>();
    }
    /** Mutable bag of query-string parameter name/value pairs cached per MAC address. */
    public static final class ParamsList {
        public HashMap<String, String> getList() {
            return list;
        }
        private HashMap<String, String> list = new HashMap<>();
    }
public HashMap<String, HeadersList> staticHeadersForMac = new HashMap<>();
public HashMap<String, ParamsList> staticParamsForMac = new HashMap<>();
DeviceConnectionInfo deviceConnectionInfo;
static HashMap<String, Pattern> replacements = new HashMap<>();
static String[] configOptions;
static String[] getParamNames;
    // Pre-compile one regex per sticky GET parameter so getURL() can rewrite
    // query strings without recompiling patterns on every request.
    static {
        getParamNames = new String[] {
                GET_PARAM_DEVICE_ID, GET_PARAM_DEVICE_ID2, GET_PARAM_SIGNATURE, GET_PARAM_SESSION
        };
        for (String name : getParamNames) {
            // group(1)=separator, group(2)=name, group(3)=current value
            replacements.put(name, Pattern.compile("([\\?&])?(" + name + ")=([a-zA-Z0-9/+]*)"));
        }
        configOptions = new String[] {
                GET_PARAM_DEVICE_ID, GET_PARAM_DEVICE_ID2, GET_PARAM_SIGNATURE, GET_PARAM_SESSION, CONFIG_PORTAL_URL, GET_PARAM_MAC_ADDRESS
        };
    }
    /** Returns the cached header bag for the request's MAC address (may be null if never initialized). */
    private HeadersList getHeadersForRequest(final RequestData requestData)
    {
        return staticHeadersForMac.get(requestData.getMacAddress());
    }
    /** Returns the cached parameter bag for the request's MAC address (may be null if never initialized). */
    private ParamsList getParamsForRequest(final RequestData requestData)
    {
        return staticParamsForMac.get(requestData.getMacAddress());
    }
    /** Creates a handler bound to a single device connection. */
    public StalkerRequestHandler(DeviceConnectionInfo deviceConnectionInfo)
    {
        this.deviceConnectionInfo = deviceConnectionInfo;
    }
    @Override
    /**
     * Rewrites the upstream URL so that the sticky GET parameters
     * (device_id, device_id2, signature, session) always carry the values
     * captured on their first appearance for this MAC address. device_id/
     * device_id2 are additionally persisted to the portal configuration.
     */
    public String getURL(final RequestData requestData) {
        logger.debug("StalkerRequestHandler::getURL()");
        String result = requestData.getRealUrl().toString();
        HashMap<String, String> staticParams = getParamsForRequest(requestData).getList();
        // First pass: capture any parameter value we have not seen yet.
        for(Map.Entry<String, Pattern> entry: replacements.entrySet())
        {
            Matcher matcher = entry.getValue().matcher(result);
            if(matcher.find())
            {
                String name = matcher.group(2);
                if(staticParams.get(name) == null)
                {
                    String value = matcher.group(3);
                    staticParams.put(name, value);
                    logger.debug("set static param [" + name + "] to [" + value + "]");
                    if(name.equals(GET_PARAM_DEVICE_ID))
                        WebServer.getPortalConfiguration().set(requestData.getConnectionName(), name, value);
                    if(name.equals(GET_PARAM_DEVICE_ID2))
                        WebServer.getPortalConfiguration().set(requestData.getConnectionName(), name, value);
                }
            }
        }
        // Second pass: substitute the captured values back into the URL.
        for(Map.Entry<String, Pattern> rep: replacements.entrySet())
        {
            Pattern from = rep.getValue();
            String to = rep.getKey();
            result = result.replaceAll(from.pattern(), "$1$2=" + staticParams.get(to));
        }
        logger.debug("New query string: " + result);
        return result;
    }
    @Override
    /**
     * Before the outgoing request is sent: pin the Authorization header.
     * The first Authorization value seen for this MAC is cached; later
     * requests that carry a different value get it overwritten with the
     * cached one on the outgoing connection.
     */
    public void onRequest(final RequestData requestData, final URLConnection urlConnection) {
        logger.debug("StalkerRequestHandler::onRequest()");
        HashMap<String, String> staticHeaders = getHeadersForRequest(requestData).getList();
        for(Map.Entry<String, String> header: requestData.getHeaders().entrySet())
        {
            String headerName = header.getKey();
            String headerValue = header.getValue();
            if(headerName.equals(HEADER_AUTHORIZATION))
            {
                if(staticHeaders.get(HEADER_AUTHORIZATION) == null)
                    staticHeaders.put(HEADER_AUTHORIZATION, headerValue);
                else
                {
                    String authorizationHeader = staticHeaders.get(HEADER_AUTHORIZATION);
                    // Already matching: nothing to rewrite.
                    if(headerValue.equals(authorizationHeader))
                        continue;
                    logger.debug("Overwriting [" + HEADER_AUTHORIZATION + "] from {" + headerValue + " } to {" + authorizationHeader + "}");
                    urlConnection.setRequestProperty(HEADER_AUTHORIZATION, authorizationHeader);
                }
            }
        }
    }
    @Override
    /**
     * Inspects PHP responses; when the request was a handshake, extracts the
     * bearer token from the body. The body is re-wrapped into a fresh stream
     * so the caller still receives the full payload.
     * NOTE(review): if IOUtils.toString throws after partially consuming
     * iStream, the partially-read stream is returned as-is — callers may see
     * truncated data in that (unlikely) case; verify whether this matters.
     */
    public InputStream onResponse(final RequestData requestData, final InputStream iStream) {
        logger.debug("StalkerRequestHandler::onResponse()");
        String target = requestData.getTarget();
        // Only portal script responses are inspected; static assets pass through.
        if(!target.contains(".php"))
            return iStream;
        try {
            String data = IOUtils.toString(iStream);
            Matcher matcher = handshakePattern.matcher(requestData.getRealUrl().toString());
            if(matcher.find())
            {
                processHandshake(requestData, data);
            }
            return IOUtils.toInputStream(data);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return iStream;
    }
@Override
public void onBeforeRequest(final RequestData requestData, final PortalConfiguration portalConfiguration) {
logger.debug("StalkerRequestHandler::onBeforeRequest()");
String macAddress = requestData.getMacAddress();
if(macAddress == null || macAddress.isEmpty())
{
String cookie = requestData.getCookie().replace("%3A", ":");
Matcher matcher = cookiePattern.matcher(cookie);
if(matcher.find())
{
requestData.setMacAddress(matcher.group(1));
macAddress = matcher.group(1);
logger.debug("MAC: " + matcher.group(1));
}
else
macAddress = "empty";
}
if(!staticHeadersForMac.containsKey(macAddress))
staticHeadersForMac.put(macAddress, new HeadersList());
HashMap<String, String> staticHeaders = getHeadersForRequest(requestData).getList();
String token = portalConfiguration.get(requestData.getConnectionName(), "token");
if(token != null)
staticHeaders.put(HEADER_AUTHORIZATION, "Bearer " + token);
if(!staticParamsForMac.containsKey(macAddress))
{
ParamsList params = new ParamsList();<|fim▁hole|> {
String value = portalConfiguration.get(requestData.getConnectionName(), name);
if(value == null)
{
logger.debug("Skipping NULL config value [" + name + "]");
continue;
}
logger.debug("Loading {" + name + "} -> {" + value + "}");
list.put(name, value);
}
staticParamsForMac.put(macAddress, params);
}
try {
requestData.setRealUrl(new URL(getURL(requestData)));
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
    /**
     * Parses a handshake response body of the form {"js":{"token":...}},
     * caches the bearer token for this MAC and persists it to the portal
     * configuration so later sessions can reuse it.
     */
    public void processHandshake(final RequestData requestData, final String data)
    {
        logger.debug("StalkerRequestHandler::processHandshake()");
        HashMap<String, String> staticHeaders = getHeadersForRequest(requestData).getList();
        JSONObject json;
        try {
            json = new JSONObject(data);
            JSONObject js = json.getJSONObject("js");
            String token = js.getString("token");
            staticHeaders.put(HEADER_AUTHORIZATION, "Bearer " + token);
            WebServer.getPortalConfiguration().set(requestData.getConnectionName(), "token", token);
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
|
HashMap<String, String> list = params.getList();
for(String name: configOptions)
|
<|file_name|>0005_auto__chg_field_campaign_user.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Relax the FK: allow Campaign.user to be NULL so a campaign can
        exist without an owning auth.User."""
        # Changing field 'Campaign.user'
        db.alter_column(u'campaign_campaign', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True))
    def backwards(self, orm):
        """Reverse migration: make Campaign.user NOT NULL again, backfilling
        existing NULL rows with user id 0 (the `default=0`)."""
        # Changing field 'Campaign.user'
        db.alter_column(u'campaign_campaign', 'user_id', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['auth.User']))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'campaign.campaign': {
'Meta': {'object_name': 'Campaign'},
'goal': ('django.db.models.fields.DecimalField', [], {'max_digits': '15', 'decimal_places': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('cloudinary.models.CloudinaryField', [], {'max_length': '100'}),
'message': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '50'}),<|fim▁hole|> },
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['campaign']<|fim▁end|>
|
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
|
<|file_name|>jasmine-html.js<|end_file_name|><|fim▁begin|>/* */
"format global";
jasmine.HtmlReporterHelpers = {};
/**
 * Create a DOM element of `type`, apply the `attrs` map, and append every
 * remaining argument as a child (strings become text nodes; falsy children
 * are skipped). Returns the new element.
 * Fix: use strict equality (===) instead of loose == for the attribute-name
 * comparison, and flatten the nested child check into else-if.
 */
jasmine.HtmlReporterHelpers.createDom = function(type, attrs, childrenVarArgs) {
  var el = document.createElement(type);
  for (var i = 2; i < arguments.length; i++) {
    var child = arguments[i];
    if (typeof child === 'string') {
      el.appendChild(document.createTextNode(child));
    } else if (child) {
      el.appendChild(child);
    }
  }
  for (var attr in attrs) {
    // `className` is a DOM property, not an attribute, so it is assigned
    // directly; everything else goes through setAttribute.
    if (attr === "className") {
      el[attr] = attrs[attr];
    } else {
      el.setAttribute(attr, attrs[attr]);
    }
  }
  return el;
};
/**
 * Map a spec's results to one of the CSS status classes:
 * 'skipped' wins over 'passed'/'failed'.
 */
jasmine.HtmlReporterHelpers.getSpecStatus = function(child) {
  var results = child.results();
  var status = results.passed() ? 'passed' : 'failed';
  return results.skipped ? 'skipped' : status;
};
/**
 * Append `childElement` into the summary DOM under its parent suite's
 * container, creating the parent's SuiteView lazily on first use. Top-level
 * children (no parent) go directly into the summary div.
 */
jasmine.HtmlReporterHelpers.appendToSummary = function(child, childElement) {
  var parentDiv = this.dom.summary;
  // Specs reference their suite via `suite`; nested suites via `parentSuite`.
  var parentSuite = (typeof child.parentSuite == 'undefined') ? 'suite' : 'parentSuite';
  var parent = child[parentSuite];
  if (parent) {
    if (typeof this.views.suites[parent.id] == 'undefined') {
      this.views.suites[parent.id] = new jasmine.HtmlReporter.SuiteView(parent, this.dom, this.views);
    }
    parentDiv = this.views.suites[parent.id].element;
  }
  parentDiv.appendChild(childElement);
};
/** Mix every helper function into the given constructor's prototype. */
jasmine.HtmlReporterHelpers.addHelpers = function(ctor) {
  for (var helperName in jasmine.HtmlReporterHelpers) {
    ctor.prototype[helperName] = jasmine.HtmlReporterHelpers[helperName];
  }
};
/**
 * In-page Jasmine reporter: builds the banner/summary/details DOM, forwards
 * runner/suite/spec events to a ReporterView, and supports focusing a single
 * spec via the `?spec=` query parameter.
 */
jasmine.HtmlReporter = function(_doc) {
  var self = this;
  var doc = _doc || window.document;
  var reporterView;
  var dom = {};
  // Jasmine Reporter Public Interface
  self.logRunningSpecs = false;
  self.reportRunnerStarting = function(runner) {
    var specs = runner.specs() || [];
    // Nothing to report: skip building the DOM entirely.
    if (specs.length == 0) {
      return;
    }
    createReporterDom(runner.env.versionString());
    doc.body.appendChild(dom.reporter);
    reporterView = new jasmine.HtmlReporter.ReporterView(dom);
    reporterView.addSpecs(specs, self.specFilter);
  };
  self.reportRunnerResults = function(runner) {
    // reporterView may be undefined when there were no specs.
    reporterView && reporterView.complete();
  };
  self.reportSuiteResults = function(suite) {
    reporterView.suiteComplete(suite);
  };
  self.reportSpecStarting = function(spec) {
    if (self.logRunningSpecs) {
      self.log('>> Jasmine Running ' + spec.suite.description + ' ' + spec.description + '...');
    }
  };
  self.reportSpecResults = function(spec) {
    reporterView.specComplete(spec);
  };
  self.log = function() {
    var console = jasmine.getGlobal().console;
    if (console && console.log) {
      if (console.log.apply) {
        console.log.apply(console, arguments);
      } else {
        console.log(arguments); // ie fix: console.log.apply doesn't exist on ie
      }
    }
  };
  // Run only specs whose full name starts with the focused spec name (if any).
  self.specFilter = function(spec) {
    if (!focusedSpecName()) {
      return true;
    }
    return spec.getFullName().indexOf(focusedSpecName()) === 0;
  };
  return self;
  // Lazily parse and memoize the `spec` query parameter from the page URL.
  function focusedSpecName() {
    var specName;
    (function memoizeFocusedSpec() {
      if (specName) {
        return;
      }
      var paramMap = [];
      var params = doc.location.search.substring(1).split('&');
      for (var i = 0; i < params.length; i++) {
        var p = params[i].split('=');
        paramMap[decodeURIComponent(p[0])] = decodeURIComponent(p[1]);
      }
      specName = paramMap.spec;
    })();
    return specName;
  }
  // Build the banner/alert/summary/details skeleton the views render into.
  function createReporterDom(version) {
    dom.reporter = self.createDom('div', { id: 'HTMLReporter', className: 'jasmine_reporter' },
      dom.banner = self.createDom('div', { className: 'banner' },
        self.createDom('span', { className: 'title' }, "Jasmine "),
        self.createDom('span', { className: 'version' }, version)),
      dom.symbolSummary = self.createDom('ul', {className: 'symbolSummary'}),
      dom.alert = self.createDom('div', {className: 'alert'}),
      dom.results = self.createDom('div', {className: 'results'},
        dom.summary = self.createDom('div', { className: 'summary' }),
        dom.details = self.createDom('div', { id: 'details' }))
    );
  }
};
this.startedAt = new Date();
this.runningSpecCount = 0;
this.completeSpecCount = 0;
this.passedCount = 0;
this.failedCount = 0;
this.skippedCount = 0;
this.createResultsMenu = function() {
this.resultsMenu = this.createDom('span', {className: 'resultsMenu bar'},
this.summaryMenuItem = this.createDom('a', {className: 'summaryMenuItem', href: "#"}, '0 specs'),
' | ',
this.detailsMenuItem = this.createDom('a', {className: 'detailsMenuItem', href: "#"}, '0 failing'));
this.summaryMenuItem.onclick = function() {
dom.reporter.className = dom.reporter.className.replace(/ showDetails/g, '');
};
this.detailsMenuItem.onclick = function() {
showDetails();
};
};
this.addSpecs = function(specs, specFilter) {
this.totalSpecCount = specs.length;
this.views = {
specs: {},
suites: {}
};
for (var i = 0; i < specs.length; i++) {
var spec = specs[i];
this.views.specs[spec.id] = new jasmine.HtmlReporter.SpecView(spec, dom, this.views);
if (specFilter(spec)) {
this.runningSpecCount++;
}
}
};
this.specComplete = function(spec) {
this.completeSpecCount++;
if (isUndefined(this.views.specs[spec.id])) {
this.views.specs[spec.id] = new jasmine.HtmlReporter.SpecView(spec, dom);
}
var specView = this.views.specs[spec.id];
switch (specView.status()) {
case 'passed':
this.passedCount++;
break;
case 'failed':
this.failedCount++;
break;
case 'skipped':
this.skippedCount++;
break;
}
specView.refresh();
this.refresh();
};
this.suiteComplete = function(suite) {
var suiteView = this.views.suites[suite.id];
if (isUndefined(suiteView)) {
return;
}
suiteView.refresh();
};
this.refresh = function() {
if (isUndefined(this.resultsMenu)) {
this.createResultsMenu();
}
// currently running UI
if (isUndefined(this.runningAlert)) {
this.runningAlert = this.createDom('a', {href: "?", className: "runningAlert bar"});
dom.alert.appendChild(this.runningAlert);
}
this.runningAlert.innerHTML = "Running " + this.completeSpecCount + " of " + specPluralizedFor(this.totalSpecCount);
// skipped specs UI
if (isUndefined(this.skippedAlert)) {
this.skippedAlert = this.createDom('a', {href: "?", className: "skippedAlert bar"});
}
this.skippedAlert.innerHTML = "Skipping " + this.skippedCount + " of " + specPluralizedFor(this.totalSpecCount) + " - run all";
if (this.skippedCount === 1 && isDefined(dom.alert)) {
dom.alert.appendChild(this.skippedAlert);
}
// passing specs UI
if (isUndefined(this.passedAlert)) {
this.passedAlert = this.createDom('span', {href: "?", className: "passingAlert bar"});
}
this.passedAlert.innerHTML = "Passing " + specPluralizedFor(this.passedCount);
// failing specs UI
if (isUndefined(this.failedAlert)) {
this.failedAlert = this.createDom('span', {href: "?", className: "failingAlert bar"});
}
this.failedAlert.innerHTML = "Failing " + specPluralizedFor(this.failedCount);
if (this.failedCount === 1 && isDefined(dom.alert)) {
dom.alert.appendChild(this.failedAlert);
dom.alert.appendChild(this.resultsMenu);
}
// summary info
this.summaryMenuItem.innerHTML = "" + specPluralizedFor(this.runningSpecCount);
this.detailsMenuItem.innerHTML = "" + this.failedCount + " failing";
};
this.complete = function() {
dom.alert.removeChild(this.runningAlert);
this.skippedAlert.innerHTML = "Ran " + this.runningSpecCount + " of " + specPluralizedFor(this.totalSpecCount) + " - run all";
if (this.failedCount === 0) {
dom.alert.appendChild(this.createDom('span', {className: 'passingAlert bar'}, "Passing " + specPluralizedFor(this.passedCount)));
} else {
showDetails();
}
dom.banner.appendChild(this.createDom('span', {className: 'duration'}, "finished in " + ((new Date().getTime() - this.startedAt.getTime()) / 1000) + "s"));
};
return this;
function showDetails() {
if (dom.reporter.className.search(/showDetails/) === -1) {
dom.reporter.className += " showDetails";
}
}
function isUndefined(obj) {
return typeof obj === 'undefined';
}
function isDefined(obj) {
return !isUndefined(obj);
}
function specPluralizedFor(count) {
var str = count + " spec";
if (count > 1) {
str += "s"
}
return str;
}
};
jasmine.HtmlReporterHelpers.addHelpers(jasmine.HtmlReporter.ReporterView);
// SpecView: per-spec UI state. Creates the symbol-summary list item
// immediately (status 'pending'); the summary/detail nodes are attached
// later by refresh() once the spec has run.
jasmine.HtmlReporter.SpecView = function(spec, dom, views) {
  this.spec = spec;
  this.dom = dom;
  this.views = views;

  this.symbol = this.createDom('li', { className: 'pending' });
  this.dom.symbolSummary.appendChild(this.symbol);

  // One-line entry for the summary tree; links to a focused re-run.
  this.summary = this.createDom('div', { className: 'specSummary' },
    this.createDom('a', {
      className: 'description',
      href: '?spec=' + encodeURIComponent(this.spec.getFullName()),
      title: this.spec.getFullName()
    }, this.spec.description)
  );

  // Expanded entry used in the failure details section.
  this.detail = this.createDom('div', { className: 'specDetail' },
    this.createDom('a', {
      className: 'description',
      href: '?spec=' + encodeURIComponent(this.spec.getFullName()),
      title: this.spec.getFullName()
    }, this.spec.getFullName())
  );
};
// Resolve this spec's status ('passed' / 'failed' / 'skipped') via the
// shared helper.
jasmine.HtmlReporter.SpecView.prototype.status = function() {
  var specStatus = this.getSpecStatus(this.spec);
  return specStatus;
};
// Update this spec's symbol to its final status and, unless it was
// skipped, attach its summary (plus failure detail when it failed) to
// the reporter DOM.
// Fix: removed a stray "<|fim…|>" generation marker that had been
// embedded inside the switch statement and broke parsing.
jasmine.HtmlReporter.SpecView.prototype.refresh = function() {
  this.symbol.className = this.status();

  switch (this.status()) {
    case 'skipped':
      break;

    case 'passed':
      this.appendSummaryToSuiteDiv();
      break;

    case 'failed':
      this.appendSummaryToSuiteDiv();
      this.appendFailureDetail();
      break;
  }
};
// Tag the summary node with the final status class, then hang it off the
// owning suite's container in the summary tree.
jasmine.HtmlReporter.SpecView.prototype.appendSummaryToSuiteDiv = function() {
  var statusClass = ' ' + this.status();
  this.summary.className = this.summary.className + statusClass;
  this.appendToSummary(this.spec, this.summary);
};
// Build the failure-detail node for this spec: one message div per log
// entry or failed expectation (with stack trace when available), and
// attach it to the details section only if there is something to show.
jasmine.HtmlReporter.SpecView.prototype.appendFailureDetail = function() {
  this.detail.className += ' ' + this.status();

  var resultItems = this.spec.results().getItems();
  var messagesDiv = this.createDom('div', { className: 'messages' });

  for (var i = 0; i < resultItems.length; i++) {
    var result = resultItems[i];

    if (result.type == 'log') {
      messagesDiv.appendChild(this.createDom('div', {className: 'resultMessage log'}, result.toString()));
    } else if (result.type == 'expect' && result.passed && !result.passed()) {
      messagesDiv.appendChild(this.createDom('div', {className: 'resultMessage fail'}, result.message));

      if (result.trace.stack) {
        messagesDiv.appendChild(this.createDom('div', {className: 'stackTrace'}, result.trace.stack));
      }
    }
  }

  if (messagesDiv.childNodes.length > 0) {
    this.detail.appendChild(messagesDiv);
    this.dom.details.appendChild(this.detail);
  }
};
jasmine.HtmlReporterHelpers.addHelpers(jasmine.HtmlReporter.SpecView);jasmine.HtmlReporter.SuiteView = function(suite, dom, views) {
this.suite = suite;
this.dom = dom;
this.views = views;
this.element = this.createDom('div', { className: 'suite' },
this.createDom('a', { className: 'description', href: '?spec=' + encodeURIComponent(this.suite.getFullName()) }, this.suite.description)
);
this.appendToSummary(this.suite, this.element);
};
// A suite's status is derived the same way as a spec's: from its results.
jasmine.HtmlReporter.SuiteView.prototype.status = function() {
  var suiteStatus = this.getSpecStatus(this.suite);
  return suiteStatus;
};
// Stamp the suite's final status class onto its container once complete.
jasmine.HtmlReporter.SuiteView.prototype.refresh = function() {
  this.element.className += " " + this.status();
};

jasmine.HtmlReporterHelpers.addHelpers(jasmine.HtmlReporter.SuiteView);
/* @deprecated Use jasmine.HtmlReporter instead
 */
// Minimal DOM reporter kept for backwards compatibility.
jasmine.TrivialReporter = function(doc) {
  this.document = doc || document;  // injected document, or the global one
  this.suiteDivs = {};              // suite.id -> container div
  this.logRunningSpecs = false;     // set true to console-log each spec start
};
// Build a DOM element of the given type, set the given attributes, and
// append any further arguments as children (strings become text nodes).
// Fix: use the reporter's injected document (this.document) rather than
// the global `document`, so a fake/alternate document handed to the
// constructor is actually honored.
jasmine.TrivialReporter.prototype.createDom = function(type, attrs, childrenVarArgs) {
  var el = this.document.createElement(type);

  for (var i = 2; i < arguments.length; i++) {
    var child = arguments[i];

    if (typeof child === 'string') {
      el.appendChild(this.document.createTextNode(child));
    } else {
      if (child) { el.appendChild(child); }
    }
  }

  for (var attr in attrs) {
    if (attr == "className") {
      // className must be assigned as a property for old IE.
      el[attr] = attrs[attr];
    } else {
      el.setAttribute(attr, attrs[attr]);
    }
  }

  return el;
};
// Build the whole static reporter UI up front: banner with show-passed /
// show-skipped checkboxes, the runner status bar, and one nested div per
// suite (indexed in this.suiteDivs for later updates).
jasmine.TrivialReporter.prototype.reportRunnerStarting = function(runner) {
  var showPassed, showSkipped;

  this.outerDiv = this.createDom('div', { id: 'TrivialReporter', className: 'jasmine_reporter' },
    this.createDom('div', { className: 'banner' },
      this.createDom('div', { className: 'logo' },
        this.createDom('span', { className: 'title' }, "Jasmine"),
        this.createDom('span', { className: 'version' }, runner.env.versionString())),
      this.createDom('div', { className: 'options' },
        "Show ",
        showPassed = this.createDom('input', { id: "__jasmine_TrivialReporter_showPassed__", type: 'checkbox' }),
        this.createDom('label', { "for": "__jasmine_TrivialReporter_showPassed__" }, " passed "),
        showSkipped = this.createDom('input', { id: "__jasmine_TrivialReporter_showSkipped__", type: 'checkbox' }),
        this.createDom('label', { "for": "__jasmine_TrivialReporter_showSkipped__" }, " skipped")
      )
    ),

    this.runnerDiv = this.createDom('div', { className: 'runner running' },
      this.createDom('a', { className: 'run_spec', href: '?' }, "run all"),
      this.runnerMessageSpan = this.createDom('span', {}, "Running..."),
      this.finishedAtSpan = this.createDom('span', { className: 'finished-at' }, ""))
  );

  this.document.body.appendChild(this.outerDiv);

  // One div per suite, nested under its parent suite's div when present.
  var suites = runner.suites();
  for (var i = 0; i < suites.length; i++) {
    var suite = suites[i];
    var suiteDiv = this.createDom('div', { className: 'suite' },
      this.createDom('a', { className: 'run_spec', href: '?spec=' + encodeURIComponent(suite.getFullName()) }, "run"),
      this.createDom('a', { className: 'description', href: '?spec=' + encodeURIComponent(suite.getFullName()) }, suite.description));
    this.suiteDivs[suite.id] = suiteDiv;
    var parentDiv = this.outerDiv;
    if (suite.parentSuite) {
      parentDiv = this.suiteDivs[suite.parentSuite.id];
    }
    parentDiv.appendChild(suiteDiv);
  }

  this.startedAt = new Date();

  // Checkbox handlers toggle CSS classes that reveal passed/skipped specs.
  var self = this;
  showPassed.onclick = function(evt) {
    if (showPassed.checked) {
      self.outerDiv.className += ' show-passed';
    } else {
      self.outerDiv.className = self.outerDiv.className.replace(/ show-passed/, '');
    }
  };

  showSkipped.onclick = function(evt) {
    if (showSkipped.checked) {
      self.outerDiv.className += ' show-skipped';
    } else {
      self.outerDiv.className = self.outerDiv.className.replace(/ show-skipped/, '');
    }
  };
};
// Finalize the runner bar: overall pass/fail class, a summary message
// with the filtered spec count, failure count and elapsed time, and the
// finish timestamp.
jasmine.TrivialReporter.prototype.reportRunnerResults = function(runner) {
  var results = runner.results();
  var className = (results.failedCount > 0) ? "runner failed" : "runner passed";
  this.runnerDiv.setAttribute("class", className);
  //do it twice for IE
  this.runnerDiv.setAttribute("className", className);

  // Count only the specs the filter actually ran.
  var specs = runner.specs();
  var specCount = 0;
  for (var i = 0; i < specs.length; i++) {
    if (this.specFilter(specs[i])) {
      specCount++;
    }
  }
  var message = "" + specCount + " spec" + (specCount == 1 ? "" : "s" ) + ", " + results.failedCount + " failure" + ((results.failedCount == 1) ? "" : "s");
  message += " in " + ((new Date().getTime() - this.startedAt.getTime()) / 1000) + "s";
  this.runnerMessageSpan.replaceChild(this.createDom('a', { className: 'description', href: '?'}, message), this.runnerMessageSpan.firstChild);

  this.finishedAtSpan.appendChild(document.createTextNode("Finished at " + new Date().toString()));
};
// Stamp a finished suite's div with its status class. An empty suite
// (no results at all) is rendered as 'skipped'.
jasmine.TrivialReporter.prototype.reportSuiteResults = function(suite) {
  var results = suite.results();
  var status = results.passed() ? 'passed' : 'failed';
  if (results.totalCount === 0) { // todo: change this to check results.skipped
    status = 'skipped';
  }
  this.suiteDivs[suite.id].className += " " + status;
};
// Optionally announce each spec as it starts (quiet unless
// logRunningSpecs has been enabled).
jasmine.TrivialReporter.prototype.reportSpecStarting = function(spec) {
  if (!this.logRunningSpecs) {
    return;
  }
  var message = '>> Jasmine Running ' + spec.suite.description + ' ' + spec.description + '...';
  this.log(message);
};
// Render one finished spec: a status-classed div with re-run links, plus
// a messages section holding log entries and failed-expectation details
// (with stack traces when available). Appended under the spec's suite div.
jasmine.TrivialReporter.prototype.reportSpecResults = function(spec) {
  var results = spec.results();
  var status = results.passed() ? 'passed' : 'failed';
  if (results.skipped) {
    status = 'skipped';
  }
  var specDiv = this.createDom('div', { className: 'spec ' + status },
    this.createDom('a', { className: 'run_spec', href: '?spec=' + encodeURIComponent(spec.getFullName()) }, "run"),
    this.createDom('a', {
      className: 'description',
      href: '?spec=' + encodeURIComponent(spec.getFullName()),
      title: spec.getFullName()
    }, spec.description));

  var resultItems = results.getItems();
  var messagesDiv = this.createDom('div', { className: 'messages' });
  for (var i = 0; i < resultItems.length; i++) {
    var result = resultItems[i];

    if (result.type == 'log') {
      messagesDiv.appendChild(this.createDom('div', {className: 'resultMessage log'}, result.toString()));
    } else if (result.type == 'expect' && result.passed && !result.passed()) {
      messagesDiv.appendChild(this.createDom('div', {className: 'resultMessage fail'}, result.message));

      if (result.trace.stack) {
        messagesDiv.appendChild(this.createDom('div', {className: 'stackTrace'}, result.trace.stack));
      }
    }
  }

  // Only attach the messages section when there is something to show.
  if (messagesDiv.childNodes.length > 0) {
    specDiv.appendChild(messagesDiv);
  }

  this.suiteDivs[spec.suite.id].appendChild(specDiv);
};
// Forward the arguments to the host console when one exists. Old IE has
// no Function.prototype.apply on console.log, so fall back to logging
// the raw arguments object there.
jasmine.TrivialReporter.prototype.log = function() {
  var hostConsole = jasmine.getGlobal().console;
  if (!hostConsole || !hostConsole.log) {
    return;
  }
  if (hostConsole.log.apply) {
    hostConsole.log.apply(hostConsole, arguments);
  } else {
    hostConsole.log(arguments); // ie fix: console.log.apply doesn't exist on ie
  }
};
// Expose the document's location; overridable in tests to fake the URL.
jasmine.TrivialReporter.prototype.getLocation = function() {
  var doc = this.document;
  return doc.location;
};
// Run a spec only when it matches the ?spec=<name> query parameter; all
// specs run when the parameter is absent.
jasmine.TrivialReporter.prototype.specFilter = function(spec) {
  var query = this.getLocation().search.substring(1);
  var pairs = query.split('&');
  var paramMap = {};

  for (var i = 0; i < pairs.length; i++) {
    var pair = pairs[i].split('=');
    paramMap[decodeURIComponent(pair[0])] = decodeURIComponent(pair[1]);
  }

  if (!paramMap.spec) {
    return true;
  }
  return spec.getFullName().indexOf(paramMap.spec) === 0;
};
| |
<|file_name|>teamstestsutils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from django.core.management import call_command
from teams.models import Team, TeamMember, Workflow
from widget.rpc import Rpc
def refresh_obj(m):
    """Return a fresh copy of *m*, re-fetched from the database by pk."""
    manager = m.__class__._default_manager
    return manager.get(pk=m.pk)
def reset_solr():
    """Wipe the solr search index and rebuild it from the database.

    Fix: stray generation markers had split this function's body; the
    ``call_command('update_index')`` rebuild step is restored to the
    function and the module-level ``rpc`` instance is re-attached below.
    """
    # cause the default site to load
    from haystack import backend
    sb = backend.SearchBackend()
    sb.clear()
    call_command('update_index')


# Shared widget RPC entry point used by the teams tests.
rpc = Rpc()
|
<|file_name|>dellos6.py<|end_file_name|><|fim▁begin|>#
# (c) 2016 Red Hat Inc.
#
# (c) 2017 Dell EMC.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from ansible.module_utils._text import to_text, to_bytes
from ansible.plugins.terminal import TerminalBase
from ansible.errors import AnsibleConnectionFailure
class TerminalModule(TerminalBase):
    """Terminal plugin for Dell EMC OS6 devices.

    Declares the prompt and error patterns used by the CLI connection,
    plus the privilege-escalation hooks implemented below.
    """

    # Prompts that mark the end of device output: exec/enable prompts
    # (optionally with up to three parenthesised context segments) and
    # bracketed shell-style prompts.
    terminal_stdout_re = [
        re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
        re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$")
    ]

    # Output fragments that indicate the device rejected a command.
    terminal_stderr_re = [
        re.compile(br"% ?Bad secret"),
        re.compile(br"(\bInterface is part of a port-channel\b)"),
        re.compile(br"(\bThe maximum number of users have already been created\b)|(\bUse '-' for range\b)"),
        re.compile(br"(?:incomplete|ambiguous) command", re.I),
        re.compile(br"connection timed out", re.I),
        re.compile(br"'[^']' +returned error code: ?\d+"),
        re.compile(br"Invalid|invalid.*$", re.I),
        re.compile(br"((\bout of range\b)|(\bnot found\b)|(\bCould not\b)|(\bUnable to\b)|(\bCannot\b)|(\bError\b)).*", re.I),
        re.compile(br"((\balready exists\b)|(\bnot exist\b)|(\bnot active\b)|(\bFailed\b)|(\bIncorrect\b)|(\bnot enabled\b)).*", re.I),
    ]

    # The device may ask a y/n confirmation question on login; answer yes.
    # NOTE(review): attribute name spelling ("inital") matches the base
    # class API, so it must not be corrected here.
    terminal_initial_prompt = br"\(y/n\)"
    terminal_initial_answer = b"y"
    terminal_inital_prompt_newline = False
    def on_open_shell(self):
        """Disable terminal paging so command output is not interrupted."""
        try:
            self._exec_cli_command(b'terminal length 0')
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')
    def on_become(self, passwd=None):
        """Enter privileged (enable) mode, answering the password prompt
        with *passwd* when one is supplied."""
        if self._get_prompt().endswith(b'#'):
            # Already in enable mode; nothing to do.
            return

        cmd = {u'command': u'enable'}
        if passwd:
            cmd[u'prompt'] = to_text(r"[\r\n]?password:$", errors='surrogate_or_strict')
            cmd[u'answer'] = passwd

        try:
            self._exec_cli_command(to_bytes(json.dumps(cmd), errors='surrogate_or_strict'))
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to elevate privilege to enable mode')

        # in dellos6 the terminal settings are accepted after the privilege mode
        try:
            self._exec_cli_command(b'terminal length 0')
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')
<|fim▁hole|> return
if prompt.strip().endswith(b')#'):
self._exec_cli_command(b'end')
self._exec_cli_command(b'disable')
elif prompt.endswith(b'#'):
self._exec_cli_command(b'disable')<|fim▁end|>
|
def on_unbecome(self):
prompt = self._get_prompt()
if prompt is None:
# if prompt is None most likely the terminal is hung up at a prompt
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import Form from 'cerebral-module-forms/Form'
import submitForm from './chains/submitForm'
import resetForm from './chains/resetForm'
import validateForm from './chains/validateForm'
/**
 * Cerebral module factory for the demo form.
 *
 * Registers the form state (fields with their validation rules) and the
 * signals that submit, reset and validate the form.
 *
 * Fix: stray generation markers had displaced the `export default`
 * wrapper after the module body; the function is reassembled in order.
 */
export default (options = {}) => {
  return (module, controller) => {
    module.addState(Form({
      name: {
        value: '',
        isRequired: true
      },
      email: {
        value: '',
        validations: ['isEmail'],
        errorMessages: ['Not valid email'],
        isRequired: true
      },
      password: {
        value: '',
        validations: ['equalsField:repeatPassword'],
        dependsOn: 'simple.repeatPassword',
        errorMessages: ['Not equal to repeated password'],
        isRequired: true
      },
      repeatPassword: {
        value: '',
        validations: ['equalsField:password'],
        dependsOn: 'simple.password',
        errorMessages: ['Not equal to password'],
        isRequired: true
      },
      address: Form({
        street: {
          value: ''
        },
        postalCode: {
          value: '',
          validations: ['isLength:4', 'isNumeric'],
          errorMessages: ['Has to be length 4', 'Can only contain numbers']
        }
      })
    }))

    module.addSignals({
      formSubmitted: submitForm,
      resetClicked: resetForm,
      validateFormClicked: validateForm
    })
  }
}
<|file_name|>SkidSteerDrivePlugin.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012-2014 Open Source Robotics Foundation
*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <string>
#include "gazebo/physics/physics.hh"
#include "gazebo/transport/transport.hh"
#include "plugins/SkidSteerDrivePlugin.hh"
using namespace gazebo;
GZ_REGISTER_MODEL_PLUGIN(SkidSteerDrivePlugin)
/////////////////////////////////////////////////
// Constructor: set defaults; the wheel geometry is derived from the
// model later, in Load(), so 0 means "not yet known".
SkidSteerDrivePlugin::SkidSteerDrivePlugin()
{
  // Default joint force limit; overridden by the <max_force> SDF element.
  this->maxForce = 5.0;
  this->wheelRadius = 0.0;
  this->wheelSeparation = 0.0;
}
/////////////////////////////////////////////////
// Look up the named joint on the model and store it at wheel slot
// _index. Returns 0 on success, 1 on error (bad index or missing joint).
int SkidSteerDrivePlugin::RegisterJoint(int _index, const std::string &_name)
{
  // Bounds checking on index
  if (_index < 0 || _index >= NUMBER_OF_WHEELS)
  {
    gzerr << "Joint index " << _index << " out of bounds [0, "
          << NUMBER_OF_WHEELS << "] in model " << this->model->GetName()
          << "." << std::endl;
    // Fix: must bail out here -- the original fell through and then
    // wrote this->joints[_index] with the out-of-range index.
    return 1;
  }

  // Find the specified joint and add it to our list
  this->joints[_index] = this->model->GetJoint(_name);
  if (!this->joints[_index])
  {
    gzerr << "Unable to find the " << _name
          << " joint in model " << this->model->GetName() << "." << std::endl;
    return 1;
  }

  // Success!
  return 0;
}
/////////////////////////////////////////////////
// Plugin entry point: cache the model, register the four wheel joints,
// derive the wheel geometry from the model, and subscribe to velocity
// commands on ~/<model>/vel_cmd.
void SkidSteerDrivePlugin::Load(physics::ModelPtr _model,
                                sdf::ElementPtr _sdf)
{
  this->model = _model;

  this->node = transport::NodePtr(new transport::Node());
  this->node->Init(this->model->GetWorld()->GetName());

  // All four joints must resolve before the plugin activates.
  int err = 0;
  err += RegisterJoint(RIGHT_FRONT, "right_front");
  err += RegisterJoint(RIGHT_REAR, "right_rear");
  err += RegisterJoint(LEFT_FRONT, "left_front");
  err += RegisterJoint(LEFT_REAR, "left_rear");
  if (err > 0)
    return;

  if (_sdf->HasElement("max_force"))
    this->maxForce = _sdf->GetElement("max_force")->Get<double>();
  else
    gzwarn << "No MaxForce value set in the model sdf, default value is 5.0.\n";

  // This assumes that front and rear wheel spacing is identical
  this->wheelSeparation = this->joints[RIGHT_FRONT]->GetAnchor(0).Distance(
      this->joints[LEFT_FRONT]->GetAnchor(0));

  // This assumes that the largest dimension of the wheel is the diameter
  // and that all wheels have the same diameter
  physics::EntityPtr wheelLink = boost::dynamic_pointer_cast<physics::Entity>(
      this->joints[RIGHT_FRONT]->GetChild() );
  if (wheelLink)
  {
    math::Box bb = wheelLink->GetBoundingBox();
    this->wheelRadius = bb.GetSize().GetMax() * 0.5;
  }

  // Validity checks...
  if (this->wheelSeparation <= 0)
  {
    gzerr << "Unable to find the wheel separation distance." << std::endl
          << "  This could mean that the right_front link and the left_front "
          << "link are overlapping." << std::endl;
    return;
  }
  if (this->wheelRadius <= 0)
  {
    gzerr << "Unable to find the wheel radius." << std::endl
          << "  This could mean that the sdf is missing a wheel link on "
          << "the right_front joint." << std::endl;
    return;
  }

  this->velSub = this->node->Subscribe(
      std::string("~/") + this->model->GetName() + std::string("/vel_cmd"),
      &SkidSteerDrivePlugin::OnVelMsg, this);
}
/////////////////////////////////////////////////
// Velocity command handler: convert the commanded linear speed (pose x)
// and yaw rate into per-side wheel angular velocities (skid steering).
void SkidSteerDrivePlugin::OnVelMsg(ConstPosePtr &_msg)
{
  // gzmsg << "cmd_vel: " << msg->position().x() << ", "
  //       << msgs::Convert(msg->orientation()).GetAsEuler().z << std::endl;

  for (int i = 0; i < NUMBER_OF_WHEELS; i++)
    this->joints[i]->SetMaxForce(0, this->maxForce);

  // Linear speed -> wheel angular speed; yaw rate -> differential term.
  double vel_lin = _msg->position().x() / this->wheelRadius;
  double vel_rot = -1 * msgs::Convert(_msg->orientation()).GetAsEuler().z
                   * (this->wheelSeparation / this->wheelRadius);

  this->joints[RIGHT_FRONT]->SetVelocity(0, vel_lin - vel_rot);
  this->joints[RIGHT_REAR ]->SetVelocity(0, vel_lin - vel_rot);
  this->joints[LEFT_FRONT ]->SetVelocity(0, vel_lin + vel_rot);
  this->joints[LEFT_REAR  ]->SetVelocity(0, vel_lin + vel_rot);
}
|
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
|
<|file_name|>dao_contact.go<|end_file_name|><|fim▁begin|>package user
import (
"context"
"regexp"
"time"
"github.com/go-gorp/gorp"
"github.com/rockbears/log"
"github.com/ovh/cds/engine/api/database/gorpmapping"
"github.com/ovh/cds/engine/gorpmapper"
"github.com/ovh/cds/sdk"
)
// getContacts runs q and returns only the contacts whose gorp signature
// verifies; rows with corrupted data are logged and skipped.
func getContacts(ctx context.Context, db gorp.SqlExecutor, q gorpmapping.Query) ([]sdk.UserContact, error) {
	dbContacts := []userContact{}
	if err := gorpmapping.GetAll(ctx, db, q, &dbContacts); err != nil {
		return nil, sdk.WrapError(err, "cannot get user contacts")
	}

	// Check signature of data, if invalid do not return it
	verified := make([]sdk.UserContact, 0, len(dbContacts))
	for i := range dbContacts {
		ok, err := gorpmapping.CheckSignature(dbContacts[i], dbContacts[i].Signature)
		if err != nil {
			return nil, err
		}
		if ok {
			verified = append(verified, dbContacts[i].UserContact)
		} else {
			log.Error(ctx, "user.getContacts> user contact %d data corrupted", dbContacts[i].ID)
		}
	}
	return verified, nil
}
// getContact runs q expecting a single row, verifies the row's gorp
// signature, and returns sdk.ErrNotFound both when no row matches and
// when the stored data fails the signature check.
func getContact(ctx context.Context, db gorp.SqlExecutor, q gorpmapping.Query) (*sdk.UserContact, error) {
	var uc userContact
	found, err := gorpmapping.Get(ctx, db, q, &uc)
	if err != nil {
		return nil, sdk.WrapError(err, "cannot get user contact")
	}
	if !found {
		return nil, sdk.WithStack(sdk.ErrNotFound)
	}

	isValid, err := gorpmapping.CheckSignature(uc, uc.Signature)
	if err != nil {
		return nil, err
	}
	if !isValid {
		// Corrupted data is hidden from callers: report it as not found.
		log.Error(ctx, "user.getContact> user contact %d (for user %s) data corrupted", uc.ID, uc.UserID)
		return nil, sdk.WithStack(sdk.ErrNotFound)
	}

	return &uc.UserContact, nil
}
// LoadContactsByUserIDs returns all contacts from database for given user ids.
// Results are ordered by contact id; corrupted rows are filtered out by
// getContacts.
func LoadContactsByUserIDs(ctx context.Context, db gorp.SqlExecutor, userIDs []string) ([]sdk.UserContact, error) {
	query := gorpmapping.NewQuery(`
    SELECT *
    FROM user_contact
    WHERE user_id = ANY(string_to_array($1, ',')::text[])
    ORDER BY id ASC
  `).Args(gorpmapping.IDStringsToQueryString(userIDs))
	return getContacts(ctx, db, query)
}
// LoadContactByTypeAndValue returns a contact for given type and value.
func LoadContactByTypeAndValue(ctx context.Context, db gorp.SqlExecutor, contactType, value string) (*sdk.UserContact, error) {
query := gorpmapping.NewQuery(`
SELECT *
FROM user_contact
WHERE type = $1 AND value = $2
`).Args(contactType, value)
return getContact(ctx, db, query)<|fim▁hole|>var emailRegexp = regexp.MustCompile(`\w[+-._\w]*\w@\w[-._\w]*\w\.\w*`)
// InsertContact in database.
// Email values are syntax-checked against emailRegexp; the contact is
// timestamped, signed and stored, and DB-generated fields are written
// back into *c.
func InsertContact(ctx context.Context, db gorpmapper.SqlExecutorWithTx, c *sdk.UserContact) error {
	if c.Type == sdk.UserContactTypeEmail {
		if !emailRegexp.MatchString(c.Value) {
			return sdk.WithStack(sdk.ErrInvalidEmail)
		}
	}
	c.Created = time.Now()
	dbc := userContact{UserContact: *c}
	if err := gorpmapping.InsertAndSign(ctx, db, &dbc); err != nil {
		return sdk.WrapError(err, "unable to insert contact userID:%s type:%s value:%s", dbc.UserID, dbc.Type, dbc.Value)
	}
	// Propagate generated values (id, timestamps, ...) back to the caller.
	*c = dbc.UserContact
	return nil
}
// UpdateContact in database.
// The row is re-signed on update and the stored state is written back
// into *c.
func UpdateContact(ctx context.Context, db gorpmapper.SqlExecutorWithTx, c *sdk.UserContact) error {
	dbc := userContact{UserContact: *c}
	if err := gorpmapping.UpdateAndSign(ctx, db, &dbc); err != nil {
		return err
	}
	*c = dbc.UserContact
	return nil
}
|
}
|
<|file_name|>queryz.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Vitess Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package tabletserver
import (
"fmt"
"html/template"
"net/http"
"sort"
"time"
"vitess.io/vitess/go/acl"
"vitess.io/vitess/go/vt/log"
"vitess.io/vitess/go/vt/logz"
"vitess.io/vitess/go/vt/sqlparser"
"vitess.io/vitess/go/vt/vttablet/tabletserver/planbuilder"
)
var (
	// queryzHeader is the static column header row of the /queryz table.
	queryzHeader = []byte(`<thead>
		<tr>
			<th>Query</th>
			<th>Table</th>
			<th>Plan</th>
			<th>Count</th>
			<th>Time</th>
			<th>MySQL Time</th>
			<th>Rows affected</th>
			<th>Rows returned</th>
			<th>Errors</th>
			<th>Time per query</th>
			<th>MySQL Time per query</th>
			<th>Rows affected per query</th>
			<th>Rows returned per query</th>
			<th>Errors per query</th>
		</tr>
	</thead>
	`)
	// queryzTmpl renders one queryzRow as a table row; Color carries the
	// low/medium/high latency CSS class.
	queryzTmpl = template.Must(template.New("example").Parse(`
		<tr class="{{.Color}}">
			<td>{{.Query}}</td>
			<td>{{.Table}}</td>
			<td>{{.Plan}}</td>
			<td>{{.Count}}</td>
			<td>{{.Time}}</td>
			<td>{{.MysqlTime}}</td>
			<td>{{.RowsAffected}}</td>
			<td>{{.RowsReturned}}</td>
			<td>{{.Errors}}</td>
			<td>{{.TimePQ}}</td>
			<td>{{.MysqlTimePQ}}</td>
			<td>{{.RowsAffectedPQ}}</td>
			<td>{{.RowsReturnedPQ}}</td>
			<td>{{.ErrorsPQ}}</td>
		</tr>
	`))
)
// queryzRow is used for rendering query stats
// using go's template.
type queryzRow struct {
	Query        string                // normalized query text (truncated for UI)
	Table        string                // main table the plan targets
	Plan         planbuilder.PlanType  // plan type identifier
	Count        uint64                // number of executions
	tm           time.Duration         // total wall time
	mysqlTime    time.Duration         // total time spent in MySQL
	RowsAffected uint64
	RowsReturned uint64
	Errors       uint64
	Color        string // low/medium/high CSS class based on per-query latency
}
// Time returns the total time as a string.
func (qzs *queryzRow) Time() string {
	totalSeconds := float64(qzs.tm) / 1e9
	return fmt.Sprintf("%.6f", totalSeconds)
}
// timePQ computes the average wall time per query, in seconds.
func (qzs *queryzRow) timePQ() float64 {
	perQuery := float64(qzs.tm) / (1e9 * float64(qzs.Count))
	return perQuery
}
// TimePQ returns the time per query as a string.
func (qzs *queryzRow) TimePQ() string {
	perQuery := qzs.timePQ()
	return fmt.Sprintf("%.6f", perQuery)
}
// MysqlTime returns the MySQL time as a string.
func (qzs *queryzRow) MysqlTime() string {
	mysqlSeconds := float64(qzs.mysqlTime) / 1e9
	return fmt.Sprintf("%.6f", mysqlSeconds)
}
// MysqlTimePQ returns the time per query as a string.
func (qzs *queryzRow) MysqlTimePQ() string {
	return fmt.Sprintf("%.6f", float64(qzs.mysqlTime)/(1e9*float64(qzs.Count)))
}
// RowsReturnedPQ returns the row count per query as a string.
func (qzs *queryzRow) RowsReturnedPQ() string {<|fim▁hole|> return fmt.Sprintf("%.6f", val)
}
// RowsAffectedPQ returns the row count per query as a string.
func (qzs *queryzRow) RowsAffectedPQ() string {
val := float64(qzs.RowsAffected) / float64(qzs.Count)
return fmt.Sprintf("%.6f", val)
}
// ErrorsPQ returns the error count per query as a string.
func (qzs *queryzRow) ErrorsPQ() string {
return fmt.Sprintf("%.6f", float64(qzs.Errors)/float64(qzs.Count))
}
type queryzSorter struct {
rows []*queryzRow
less func(row1, row2 *queryzRow) bool
}
func (s *queryzSorter) Len() int { return len(s.rows) }
func (s *queryzSorter) Swap(i, j int) { s.rows[i], s.rows[j] = s.rows[j], s.rows[i] }
func (s *queryzSorter) Less(i, j int) bool { return s.less(s.rows[i], s.rows[j]) }
func queryzHandler(qe *QueryEngine, w http.ResponseWriter, r *http.Request) {
if err := acl.CheckAccessHTTP(r, acl.DEBUGGING); err != nil {
acl.SendError(w, err)
return
}
logz.StartHTMLTable(w)
defer logz.EndHTMLTable(w)
w.Write(queryzHeader)
sorter := queryzSorter{
rows: nil,
less: func(row1, row2 *queryzRow) bool {
return row1.timePQ() > row2.timePQ()
},
}
qe.plans.ForEach(func(value interface{}) bool {
plan := value.(*TabletPlan)
if plan == nil {
return true
}
Value := &queryzRow{
Query: logz.Wrappable(sqlparser.TruncateForUI(plan.Original)),
Table: plan.TableName().String(),
Plan: plan.PlanID,
}
Value.Count, Value.tm, Value.mysqlTime, Value.RowsAffected, Value.RowsReturned, Value.Errors = plan.Stats()
var timepq time.Duration
if Value.Count != 0 {
timepq = Value.tm / time.Duration(Value.Count)
}
if timepq < 10*time.Millisecond {
Value.Color = "low"
} else if timepq < 100*time.Millisecond {
Value.Color = "medium"
} else {
Value.Color = "high"
}
sorter.rows = append(sorter.rows, Value)
return true
})
sort.Sort(&sorter)
for _, Value := range sorter.rows {
if err := queryzTmpl.Execute(w, Value); err != nil {
log.Errorf("queryz: couldn't execute template: %v", err)
}
}
}<|fim▁end|>
|
val := float64(qzs.RowsReturned) / float64(qzs.Count)
|
<|file_name|>216_havana.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.changeset import UniqueConstraint
from migrate import ForeignKeyConstraint
from oslo_log import log as logging
from sqlalchemy import Boolean, BigInteger, Column, DateTime, Enum, Float
from sqlalchemy import dialects
from sqlalchemy import ForeignKey, Index, Integer, MetaData, String, Table
from sqlalchemy import Text
from sqlalchemy.types import NullType
from nova.i18n import _LE
LOG = logging.getLogger(__name__)
# Note on the autoincrement flag: this is defaulted for primary key columns
# of integral type, so is no longer set explicitly in such cases.
# NOTE(dprince): This wrapper allows us to easily match the Folsom MySQL
# Schema. In Folsom we created tables as latin1 and converted them to utf8
# later. This conversion causes some of the Text columns on MySQL to get
# created as mediumtext instead of just text.
def MediumText():
    """Return a Text type that becomes MEDIUMTEXT on MySQL backends."""
    base = Text()
    return base.with_variant(dialects.mysql.MEDIUMTEXT(), 'mysql')
def Inet():
    """Return a 43-char String type that becomes INET on PostgreSQL."""
    base = String(length=43)
    return base.with_variant(dialects.postgresql.INET(), 'postgresql')
def InetSmall():
    """Return a 39-char String type that becomes INET on PostgreSQL."""
    base = String(length=39)
    return base.with_variant(dialects.postgresql.INET(), 'postgresql')
def _create_shadow_tables(migrate_engine):
    """Create a ``shadow_<name>`` copy of every table in the schema.

    Columns are copied as-is except for two special cases: NullType
    columns (BigInteger reflected under sqlite) are re-declared as
    BigInteger, and ``instances.locked_by`` gets its own Enum so the
    shadow table does not share the live table's enum type name.

    :param migrate_engine: SQLAlchemy engine bound to the target DB
    :raises: re-raises any exception from table creation after logging
    """
    meta = MetaData(migrate_engine)
    meta.reflect(migrate_engine)
    table_names = list(meta.tables.keys())
    meta.bind = migrate_engine
    for table_name in table_names:
        table = Table(table_name, meta, autoload=True)
        columns = []
        for column in table.columns:
            column_copy = None
            # NOTE(boris-42): BigInteger is not supported by sqlite, so
            #                 after copy it will have NullType, other
            #                 types that are used in Nova are supported by
            #                 sqlite.
            if isinstance(column.type, NullType):
                column_copy = Column(column.name, BigInteger(), default=0)
            if table_name == 'instances' and column.name == 'locked_by':
                enum = Enum('owner', 'admin',
                            name='shadow_instances0locked_by')
                column_copy = Column(column.name, enum)
            # Fall back to a plain copy only when no replacement column was
            # built above. (The previous `else:` on the locked_by check
            # clobbered the BigInteger replacement for every NullType
            # column, defeating the sqlite workaround in the NOTE.)
            if column_copy is None:
                column_copy = column.copy()
            columns.append(column_copy)
        shadow_table_name = 'shadow_' + table_name
        shadow_table = Table(shadow_table_name, meta, *columns,
                             mysql_engine='InnoDB')
        try:
            shadow_table.create()
        except Exception:
            # Log the failing table definition for debuggability, then
            # propagate so the migration aborts.
            LOG.info(repr(shadow_table))
            LOG.exception(_LE('Exception while creating table.'))
            raise
# NOTE(dprince): we add these here so our schema contains dump tables
# which were added in migration 209 (in Havana). We can drop these in
# Icehouse: https://bugs.launchpad.net/nova/+bug/1266538
def _create_dump_tables(migrate_engine):
    """Create ``dump_<name>`` copies of a fixed set of tables.

    :param migrate_engine: SQLAlchemy engine bound to the target DB
    """
    meta = MetaData(migrate_engine)
    meta.reflect(migrate_engine)
    dump_sources = ['compute_node_stats', 'compute_nodes', 'instance_actions',
                    'instance_actions_events', 'instance_faults', 'migrations']
    for source_name in dump_sources:
        source = Table(source_name, meta, autoload=True)
        # NOTE(dprince): The dump_ tables were originally created from an
        # earlier schema version so we don't want to add the pci_stats
        # column so that schema diffs are exactly the same.
        copied = [col.copy() for col in source.columns
                  if col.name != 'pci_stats']
        dump_table = Table('dump_' + source.name, meta, *copied,
                           mysql_engine='InnoDB')
        dump_table.create()
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
agent_builds = Table('agent_builds', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('hypervisor', String(length=255)),
Column('os', String(length=255)),
Column('architecture', String(length=255)),
Column('version', String(length=255)),
Column('url', String(length=255)),
Column('md5hash', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregate_hosts = Table('aggregate_hosts', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('host', String(length=255)),
Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregate_metadata = Table('aggregate_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('aggregate_id', Integer, ForeignKey('aggregates.id'),
nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
aggregates = Table('aggregates', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
block_device_mapping = Table('block_device_mapping', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('device_name', String(length=255), nullable=True),
Column('delete_on_termination', Boolean),
Column('snapshot_id', String(length=36), nullable=True),
Column('volume_id', String(length=36), nullable=True),
Column('volume_size', Integer),
Column('no_device', Boolean),
Column('connection_info', MediumText()),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
Column('source_type', String(length=255), nullable=True),
Column('destination_type', String(length=255), nullable=True),
Column('guest_format', String(length=255), nullable=True),
Column('device_type', String(length=255), nullable=True),
Column('disk_bus', String(length=255), nullable=True),
Column('boot_index', Integer),
Column('image_id', String(length=36), nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
bw_usage_cache = Table('bw_usage_cache', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('start_period', DateTime, nullable=False),
Column('last_refreshed', DateTime),
Column('bw_in', BigInteger),
Column('bw_out', BigInteger),
Column('mac', String(length=255)),
Column('uuid', String(length=36)),
Column('last_ctr_in', BigInteger()),
Column('last_ctr_out', BigInteger()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
cells = Table('cells', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('api_url', String(length=255)),
Column('weight_offset', Float),
Column('weight_scale', Float),
Column('name', String(length=255)),
Column('is_parent', Boolean),
Column('deleted', Integer),
Column('transport_url', String(length=255), nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
certificates = Table('certificates', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('file_name', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
compute_node_stats = Table('compute_node_stats', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('compute_node_id', Integer, nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
compute_nodes = Table('compute_nodes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('service_id', Integer, nullable=False),
Column('vcpus', Integer, nullable=False),
Column('memory_mb', Integer, nullable=False),
Column('local_gb', Integer, nullable=False),
Column('vcpus_used', Integer, nullable=False),
Column('memory_mb_used', Integer, nullable=False),
Column('local_gb_used', Integer, nullable=False),
Column('hypervisor_type', MediumText(), nullable=False),
Column('hypervisor_version', Integer, nullable=False),
Column('cpu_info', MediumText(), nullable=False),
Column('disk_available_least', Integer),
Column('free_ram_mb', Integer),
Column('free_disk_gb', Integer),
Column('current_workload', Integer),
Column('running_vms', Integer),
Column('hypervisor_hostname', String(length=255)),
Column('deleted', Integer),
Column('host_ip', InetSmall()),
Column('supported_instances', Text),
Column('pci_stats', Text, nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
console_pools = Table('console_pools', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('username', String(length=255)),
Column('password', String(length=255)),
Column('console_type', String(length=255)),
Column('public_hostname', String(length=255)),
Column('host', String(length=255)),
Column('compute_host', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
consoles_instance_uuid_column_args = ['instance_uuid', String(length=36)]
consoles_instance_uuid_column_args.append(
ForeignKey('instances.uuid', name='consoles_instance_uuid_fkey'))
consoles = Table('consoles', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_name', String(length=255)),
Column('password', String(length=255)),
Column('port', Integer),
Column('pool_id', Integer, ForeignKey('console_pools.id')),
Column(*consoles_instance_uuid_column_args),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
dns_domains = Table('dns_domains', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Boolean),
Column('domain', String(length=255), primary_key=True, nullable=False),
Column('scope', String(length=255)),
Column('availability_zone', String(length=255)),
Column('project_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
fixed_ips = Table('fixed_ips', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('network_id', Integer),
Column('allocated', Boolean),
Column('leased', Boolean),
Column('reserved', Boolean),
Column('virtual_interface_id', Integer),
Column('host', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
floating_ips = Table('floating_ips', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', InetSmall()),
Column('fixed_ip_id', Integer),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('auto_assigned', Boolean),
Column('pool', String(length=255)),
Column('interface', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_faults = Table('instance_faults', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=36)),
Column('code', Integer, nullable=False),
Column('message', String(length=255)),
Column('details', MediumText()),
Column('host', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_id_mappings = Table('instance_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_info_caches = Table('instance_info_caches', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('network_info', MediumText()),
Column('instance_uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
groups = Table('instance_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('uuid', String(length=36), nullable=False),
Column('name', String(length=255)),
UniqueConstraint('uuid', 'deleted',
name='uniq_instance_groups0uuid0deleted'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_metadata = Table('instance_group_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_policy = Table('instance_group_policy', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('policy', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
group_member = Table('instance_group_member', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_id', String(length=255)),
Column('group_id', Integer, ForeignKey('instance_groups.id'),
nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
instance_metadata = Table('instance_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('instance_uuid', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_system_metadata = Table('instance_system_metadata', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_uuid', String(length=36), nullable=False),
Column('key', String(length=255), nullable=False),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_type_extra_specs = Table('instance_type_extra_specs', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_type_id', Integer, ForeignKey('instance_types.id'),
nullable=False),
Column('key', String(length=255)),
Column('value', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_type_projects = Table('instance_type_projects', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('instance_type_id', Integer, nullable=False),
Column('project_id', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_types = Table('instance_types', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('name', String(length=255)),
Column('id', Integer, primary_key=True, nullable=False),
Column('memory_mb', Integer, nullable=False),
Column('vcpus', Integer, nullable=False),
Column('swap', Integer, nullable=False),
Column('vcpu_weight', Integer),
Column('flavorid', String(length=255)),
Column('rxtx_factor', Float),
Column('root_gb', Integer),
Column('ephemeral_gb', Integer),
Column('disabled', Boolean),
Column('is_public', Boolean),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
inst_lock_enum = Enum('owner', 'admin', name='instances0locked_by')
instances = Table('instances', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('internal_id', Integer),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('image_ref', String(length=255)),
Column('kernel_id', String(length=255)),
Column('ramdisk_id', String(length=255)),
Column('launch_index', Integer),
Column('key_name', String(length=255)),
Column('key_data', MediumText()),
Column('power_state', Integer),
Column('vm_state', String(length=255)),
Column('memory_mb', Integer),
Column('vcpus', Integer),
Column('hostname', String(length=255)),
Column('host', String(length=255)),
Column('user_data', MediumText()),
Column('reservation_id', String(length=255)),
Column('scheduled_at', DateTime),
Column('launched_at', DateTime),
Column('terminated_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('availability_zone', String(length=255)),
Column('locked', Boolean),
Column('os_type', String(length=255)),
Column('launched_on', MediumText()),
Column('instance_type_id', Integer),
Column('vm_mode', String(length=255)),
Column('uuid', String(length=36)),
Column('architecture', String(length=255)),
Column('root_device_name', String(length=255)),
Column('access_ip_v4', InetSmall()),
Column('access_ip_v6', InetSmall()),
Column('config_drive', String(length=255)),
Column('task_state', String(length=255)),
Column('default_ephemeral_device', String(length=255)),
Column('default_swap_device', String(length=255)),
Column('progress', Integer),
Column('auto_disk_config', Boolean),
Column('shutdown_terminate', Boolean),
Column('disable_terminate', Boolean),
Column('root_gb', Integer),
Column('ephemeral_gb', Integer),
Column('cell_name', String(length=255)),
Column('node', String(length=255)),
Column('deleted', Integer),
Column('locked_by', inst_lock_enum),
Column('cleaned', Integer, default=0),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instance_actions = Table('instance_actions', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('action', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('request_id', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('start_time', DateTime),
Column('finish_time', DateTime),
Column('message', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
instance_actions_events = Table('instance_actions_events', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('event', String(length=255)),
Column('action_id', Integer, ForeignKey('instance_actions.id')),
Column('start_time', DateTime),
Column('finish_time', DateTime),
Column('result', String(length=255)),
Column('traceback', Text),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
iscsi_targets = Table('iscsi_targets', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('target_num', Integer),
Column('host', String(length=255)),
Column('volume_id', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
<|fim▁hole|> Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('user_id', String(length=255)),
Column('fingerprint', String(length=255)),
Column('public_key', MediumText()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
migrations = Table('migrations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('source_compute', String(length=255)),
Column('dest_compute', String(length=255)),
Column('dest_host', String(length=255)),
Column('status', String(length=255)),
Column('instance_uuid', String(length=36)),
Column('old_instance_type_id', Integer),
Column('new_instance_type_id', Integer),
Column('source_node', String(length=255)),
Column('dest_node', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
networks = Table('networks', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('injected', Boolean),
Column('cidr', Inet()),
Column('netmask', InetSmall()),
Column('bridge', String(length=255)),
Column('gateway', InetSmall()),
Column('broadcast', InetSmall()),
Column('dns1', InetSmall()),
Column('vlan', Integer),
Column('vpn_public_address', InetSmall()),
Column('vpn_public_port', Integer),
Column('vpn_private_address', InetSmall()),
Column('dhcp_start', InetSmall()),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('cidr_v6', Inet()),
Column('gateway_v6', InetSmall()),
Column('label', String(length=255)),
Column('netmask_v6', InetSmall()),
Column('bridge_interface', String(length=255)),
Column('multi_host', Boolean),
Column('dns2', InetSmall()),
Column('uuid', String(length=36)),
Column('priority', Integer),
Column('rxtx_base', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
pci_devices_uc_name = 'uniq_pci_devices0compute_node_id0address0deleted'
pci_devices = Table('pci_devices', meta,
Column('created_at', DateTime(timezone=False)),
Column('updated_at', DateTime(timezone=False)),
Column('deleted_at', DateTime(timezone=False)),
Column('deleted', Integer, default=0, nullable=False),
Column('id', Integer, primary_key=True),
Column('compute_node_id', Integer, nullable=False),
Column('address', String(12), nullable=False),
Column('product_id', String(4)),
Column('vendor_id', String(4)),
Column('dev_type', String(8)),
Column('dev_id', String(255)),
Column('label', String(255), nullable=False),
Column('status', String(36), nullable=False),
Column('extra_info', Text, nullable=True),
Column('instance_uuid', String(36), nullable=True),
Index('ix_pci_devices_compute_node_id_deleted',
'compute_node_id', 'deleted'),
Index('ix_pci_devices_instance_uuid_deleted',
'instance_uuid', 'deleted'),
UniqueConstraint('compute_node_id',
'address', 'deleted',
name=pci_devices_uc_name),
mysql_engine='InnoDB',
mysql_charset='utf8')
provider_fw_rules = Table('provider_fw_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('protocol', String(length=5)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quota_classes = Table('quota_classes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('class_name', String(length=255)),
Column('resource', String(length=255)),
Column('hard_limit', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quota_usages = Table('quota_usages', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('project_id', String(length=255)),
Column('resource', String(length=255)),
Column('in_use', Integer, nullable=False),
Column('reserved', Integer, nullable=False),
Column('until_refresh', Integer),
Column('deleted', Integer),
Column('user_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
quotas = Table('quotas', meta,
Column('id', Integer, primary_key=True, nullable=False),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('project_id', String(length=255)),
Column('resource', String(length=255), nullable=False),
Column('hard_limit', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
uniq_name = "uniq_project_user_quotas0user_id0project_id0resource0deleted"
project_user_quotas = Table('project_user_quotas', meta,
Column('id', Integer, primary_key=True,
nullable=False),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer),
Column('user_id',
String(length=255),
nullable=False),
Column('project_id',
String(length=255),
nullable=False),
Column('resource',
String(length=255),
nullable=False),
Column('hard_limit', Integer, nullable=True),
UniqueConstraint('user_id', 'project_id', 'resource',
'deleted', name=uniq_name),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
reservations = Table('reservations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('usage_id', Integer, nullable=False),
Column('project_id', String(length=255)),
Column('resource', String(length=255)),
Column('delta', Integer, nullable=False),
Column('expire', DateTime),
Column('deleted', Integer),
Column('user_id', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
s3_images = Table('s3_images', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_instance_association = \
Table('security_group_instance_association', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('security_group_id', Integer),
Column('instance_uuid', String(length=36)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_rules = Table('security_group_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('parent_group_id', Integer, ForeignKey('security_groups.id')),
Column('protocol', String(length=255)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
Column('group_id', Integer, ForeignKey('security_groups.id')),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_groups = Table('security_groups', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('name', String(length=255)),
Column('description', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_group_default_rules = Table('security_group_default_rules', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('deleted', Integer, default=0),
Column('id', Integer, primary_key=True, nullable=False),
Column('protocol', String(length=5)),
Column('from_port', Integer),
Column('to_port', Integer),
Column('cidr', Inet()),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
services = Table('services', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('host', String(length=255)),
Column('binary', String(length=255)),
Column('topic', String(length=255)),
Column('report_count', Integer, nullable=False),
Column('disabled', Boolean),
Column('deleted', Integer),
Column('disabled_reason', String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
snapshot_id_mappings = Table('snapshot_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
snapshots = Table('snapshots', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', String(length=36), primary_key=True, nullable=False),
Column('volume_id', String(length=36), nullable=False),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('status', String(length=255)),
Column('progress', String(length=255)),
Column('volume_size', Integer),
Column('scheduled_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('deleted', String(length=36)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
task_log = Table('task_log', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('task_name', String(length=255), nullable=False),
Column('state', String(length=255), nullable=False),
Column('host', String(length=255), nullable=False),
Column('period_beginning', DateTime, nullable=False),
Column('period_ending', DateTime, nullable=False),
Column('message', String(length=255), nullable=False),
Column('task_items', Integer),
Column('errors', Integer),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
virtual_interfaces = Table('virtual_interfaces', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('address', String(length=255)),
Column('network_id', Integer),
Column('uuid', String(length=36)),
Column('instance_uuid', String(length=36), nullable=True),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volume_id_mappings = Table('volume_id_mappings', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(length=36), nullable=False),
Column('deleted', Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volumes = Table('volumes', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
Column('id', String(length=36), primary_key=True, nullable=False),
Column('ec2_id', String(length=255)),
Column('user_id', String(length=255)),
Column('project_id', String(length=255)),
Column('host', String(length=255)),
Column('size', Integer),
Column('availability_zone', String(length=255)),
Column('mountpoint', String(length=255)),
Column('status', String(length=255)),
Column('attach_status', String(length=255)),
Column('scheduled_at', DateTime),
Column('launched_at', DateTime),
Column('terminated_at', DateTime),
Column('display_name', String(length=255)),
Column('display_description', String(length=255)),
Column('provider_location', String(length=256)),
Column('provider_auth', String(length=256)),
Column('snapshot_id', String(length=36)),
Column('volume_type_id', Integer),
Column('instance_uuid', String(length=36)),
Column('attach_time', DateTime),
Column('deleted', String(length=36)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
volume_usage_cache = Table('volume_usage_cache', meta,
Column('created_at', DateTime(timezone=False)),
Column('updated_at', DateTime(timezone=False)),
Column('deleted_at', DateTime(timezone=False)),
Column('id', Integer(), primary_key=True, nullable=False),
Column('volume_id', String(36), nullable=False),
Column('tot_last_refreshed', DateTime(timezone=False)),
Column('tot_reads', BigInteger(), default=0),
Column('tot_read_bytes', BigInteger(), default=0),
Column('tot_writes', BigInteger(), default=0),
Column('tot_write_bytes', BigInteger(), default=0),
Column('curr_last_refreshed', DateTime(timezone=False)),
Column('curr_reads', BigInteger(), default=0),
Column('curr_read_bytes', BigInteger(), default=0),
Column('curr_writes', BigInteger(), default=0),
Column('curr_write_bytes', BigInteger(), default=0),
Column('deleted', Integer),
Column("instance_uuid", String(length=36)),
Column("project_id", String(length=36)),
Column("user_id", String(length=36)),
Column("availability_zone", String(length=255)),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
instances.create()
Index('project_id', instances.c.project_id).create()
Index('uuid', instances.c.uuid, unique=True).create()
# create all tables
tables = [aggregates, console_pools, instance_types,
security_groups, snapshots, volumes,
# those that are children and others later
agent_builds, aggregate_hosts, aggregate_metadata,
block_device_mapping, bw_usage_cache, cells,
certificates, compute_node_stats, compute_nodes, consoles,
dns_domains, fixed_ips, floating_ips,
instance_faults, instance_id_mappings, instance_info_caches,
instance_metadata, instance_system_metadata,
instance_type_extra_specs, instance_type_projects,
instance_actions, instance_actions_events,
groups, group_metadata, group_policy, group_member,
iscsi_targets, key_pairs, migrations, networks,
pci_devices, provider_fw_rules, quota_classes, quota_usages,
quotas, project_user_quotas,
reservations, s3_images, security_group_instance_association,
security_group_rules, security_group_default_rules,
services, snapshot_id_mappings, task_log,
virtual_interfaces,
volume_id_mappings,
volume_usage_cache]
for table in tables:
try:
table.create()
except Exception:
LOG.info(repr(table))
LOG.exception(_LE('Exception while creating table.'))
raise
# task log unique constraint
task_log_uc = "uniq_task_log0task_name0host0period_beginning0period_ending"
task_log_cols = ('task_name', 'host', 'period_beginning', 'period_ending')
uc = UniqueConstraint(*task_log_cols, table=task_log, name=task_log_uc)
uc.create()
# networks unique constraint
UniqueConstraint('vlan', 'deleted', table=networks,
name='uniq_networks0vlan0deleted').create()
# instance_type_name constraint
UniqueConstraint('name', 'deleted', table=instance_types,
name='uniq_instance_types0name0deleted').create()
# flavorid unique constraint
UniqueConstraint('flavorid', 'deleted', table=instance_types,
name='uniq_instance_types0flavorid0deleted').create()
# keypair contraint
UniqueConstraint('user_id', 'name', 'deleted', table=key_pairs,
name='uniq_key_pairs0user_id0name0deleted').create()
# instance_type_projects constraint
inst_type_uc_name = 'uniq_instance_type_projects0instance_type_id0' + \
'project_id0deleted'
UniqueConstraint('instance_type_id', 'project_id', 'deleted',
table=instance_type_projects,
name=inst_type_uc_name).create()
# floating_ips unique constraint
UniqueConstraint('address', 'deleted',
table=floating_ips,
name='uniq_floating_ips0address0deleted').create()
# instance_info_caches
UniqueConstraint('instance_uuid',
table=instance_info_caches,
name='uniq_instance_info_caches0instance_uuid').create()
UniqueConstraint('address', 'deleted',
table=virtual_interfaces,
name='uniq_virtual_interfaces0address0deleted').create()
# cells
UniqueConstraint('name', 'deleted',
table=cells,
name='uniq_cells0name0deleted').create()
# security_groups
uc = UniqueConstraint('project_id', 'name', 'deleted',
table=security_groups,
name='uniq_security_groups0project_id0name0deleted')
uc.create()
# quotas
UniqueConstraint('project_id', 'resource', 'deleted',
table=quotas,
name='uniq_quotas0project_id0resource0deleted').create()
# fixed_ips
UniqueConstraint('address', 'deleted',
table=fixed_ips,
name='uniq_fixed_ips0address0deleted').create()
# services
UniqueConstraint('host', 'topic', 'deleted',
table=services,
name='uniq_services0host0topic0deleted').create()
UniqueConstraint('host', 'binary', 'deleted',
table=services,
name='uniq_services0host0binary0deleted').create()
# agent_builds
uc_name = 'uniq_agent_builds0hypervisor0os0architecture0deleted'
UniqueConstraint('hypervisor', 'os', 'architecture', 'deleted',
table=agent_builds,
name=uc_name).create()
uc_name = 'uniq_console_pools0host0console_type0compute_host0deleted'
UniqueConstraint('host', 'console_type', 'compute_host', 'deleted',
table=console_pools,
name=uc_name).create()
uc_name = 'uniq_aggregate_hosts0host0aggregate_id0deleted'
UniqueConstraint('host', 'aggregate_id', 'deleted',
table=aggregate_hosts,
name=uc_name).create()
uc_name = 'uniq_aggregate_metadata0aggregate_id0key0deleted'
UniqueConstraint('aggregate_id', 'key', 'deleted',
table=aggregate_metadata,
name=uc_name).create()
uc_name = 'uniq_instance_type_extra_specs0instance_type_id0key0deleted'
UniqueConstraint('instance_type_id', 'key', 'deleted',
table=instance_type_extra_specs,
name=uc_name).create()
# created first (to preserve ordering for schema diffs)
mysql_pre_indexes = [
Index('instance_type_id', instance_type_projects.c.instance_type_id),
Index('project_id', dns_domains.c.project_id),
Index('fixed_ip_id', floating_ips.c.fixed_ip_id),
Index('network_id', virtual_interfaces.c.network_id),
Index('network_id', fixed_ips.c.network_id),
Index('fixed_ips_virtual_interface_id_fkey',
fixed_ips.c.virtual_interface_id),
Index('address', fixed_ips.c.address),
Index('fixed_ips_instance_uuid_fkey', fixed_ips.c.instance_uuid),
Index('instance_uuid', instance_system_metadata.c.instance_uuid),
Index('iscsi_targets_volume_id_fkey', iscsi_targets.c.volume_id),
Index('snapshot_id', block_device_mapping.c.snapshot_id),
Index('usage_id', reservations.c.usage_id),
Index('virtual_interfaces_instance_uuid_fkey',
virtual_interfaces.c.instance_uuid),
Index('volume_id', block_device_mapping.c.volume_id),
Index('security_group_id',
security_group_instance_association.c.security_group_id),
]
# Common indexes (indexes we apply to all databases)
# NOTE: order specific for MySQL diff support
common_indexes = [
# aggregate_metadata
Index('aggregate_metadata_key_idx', aggregate_metadata.c.key),
# agent_builds
Index('agent_builds_hypervisor_os_arch_idx',
agent_builds.c.hypervisor,
agent_builds.c.os,
agent_builds.c.architecture),
# block_device_mapping
Index('block_device_mapping_instance_uuid_idx',
block_device_mapping.c.instance_uuid),
Index('block_device_mapping_instance_uuid_device_name_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.device_name),
# NOTE(dprince): This is now a duplicate index on MySQL and needs to
# be removed there. We leave it here so the Index ordering
# matches on schema diffs (for MySQL).
# See Havana migration 186_new_bdm_format where we dropped the
# virtual_name column.
# IceHouse fix is here: https://bugs.launchpad.net/nova/+bug/1265839
Index(
'block_device_mapping_instance_uuid_virtual_name_device_name_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.device_name),
Index('block_device_mapping_instance_uuid_volume_id_idx',
block_device_mapping.c.instance_uuid,
block_device_mapping.c.volume_id),
# bw_usage_cache
Index('bw_usage_cache_uuid_start_period_idx',
bw_usage_cache.c.uuid, bw_usage_cache.c.start_period),
Index('certificates_project_id_deleted_idx',
certificates.c.project_id, certificates.c.deleted),
Index('certificates_user_id_deleted_idx', certificates.c.user_id,
certificates.c.deleted),
# compute_node_stats
Index('ix_compute_node_stats_compute_node_id',
compute_node_stats.c.compute_node_id),
Index('compute_node_stats_node_id_and_deleted_idx',
compute_node_stats.c.compute_node_id,
compute_node_stats.c.deleted),
# consoles
Index('consoles_instance_uuid_idx', consoles.c.instance_uuid),
# dns_domains
Index('dns_domains_domain_deleted_idx',
dns_domains.c.domain, dns_domains.c.deleted),
# fixed_ips
Index('fixed_ips_host_idx', fixed_ips.c.host),
Index('fixed_ips_network_id_host_deleted_idx', fixed_ips.c.network_id,
fixed_ips.c.host, fixed_ips.c.deleted),
Index('fixed_ips_address_reserved_network_id_deleted_idx',
fixed_ips.c.address, fixed_ips.c.reserved,
fixed_ips.c.network_id, fixed_ips.c.deleted),
Index('fixed_ips_deleted_allocated_idx', fixed_ips.c.address,
fixed_ips.c.deleted, fixed_ips.c.allocated),
# floating_ips
Index('floating_ips_host_idx', floating_ips.c.host),
Index('floating_ips_project_id_idx', floating_ips.c.project_id),
Index('floating_ips_pool_deleted_fixed_ip_id_project_id_idx',
floating_ips.c.pool, floating_ips.c.deleted,
floating_ips.c.fixed_ip_id, floating_ips.c.project_id),
# group_member
Index('instance_group_member_instance_idx',
group_member.c.instance_id),
# group_metadata
Index('instance_group_metadata_key_idx', group_metadata.c.key),
# group_policy
Index('instance_group_policy_policy_idx', group_policy.c.policy),
# instances
Index('instances_reservation_id_idx',
instances.c.reservation_id),
Index('instances_terminated_at_launched_at_idx',
instances.c.terminated_at,
instances.c.launched_at),
Index('instances_task_state_updated_at_idx',
instances.c.task_state,
instances.c.updated_at),
Index('instances_host_deleted_idx', instances.c.host,
instances.c.deleted),
Index('instances_uuid_deleted_idx', instances.c.uuid,
instances.c.deleted),
Index('instances_host_node_deleted_idx', instances.c.host,
instances.c.node, instances.c.deleted),
Index('instances_host_deleted_cleaned_idx',
instances.c.host, instances.c.deleted,
instances.c.cleaned),
# instance_actions
Index('instance_uuid_idx', instance_actions.c.instance_uuid),
Index('request_id_idx', instance_actions.c.request_id),
# instance_faults
Index('instance_faults_host_idx', instance_faults.c.host),
Index('instance_faults_instance_uuid_deleted_created_at_idx',
instance_faults.c.instance_uuid, instance_faults.c.deleted,
instance_faults.c.created_at),
# instance_id_mappings
Index('ix_instance_id_mappings_uuid', instance_id_mappings.c.uuid),
# instance_metadata
Index('instance_metadata_instance_uuid_idx',
instance_metadata.c.instance_uuid),
# instance_type_extra_specs
Index('instance_type_extra_specs_instance_type_id_key_idx',
instance_type_extra_specs.c.instance_type_id,
instance_type_extra_specs.c.key),
# iscsi_targets
Index('iscsi_targets_host_idx', iscsi_targets.c.host),
Index('iscsi_targets_host_volume_id_deleted_idx',
iscsi_targets.c.host, iscsi_targets.c.volume_id,
iscsi_targets.c.deleted),
# migrations
Index('migrations_by_host_nodes_and_status_idx',
migrations.c.deleted, migrations.c.source_compute,
migrations.c.dest_compute, migrations.c.source_node,
migrations.c.dest_node, migrations.c.status),
Index('migrations_instance_uuid_and_status_idx',
migrations.c.deleted, migrations.c.instance_uuid,
migrations.c.status),
# networks
Index('networks_host_idx', networks.c.host),
Index('networks_cidr_v6_idx', networks.c.cidr_v6),
Index('networks_bridge_deleted_idx', networks.c.bridge,
networks.c.deleted),
Index('networks_project_id_deleted_idx', networks.c.project_id,
networks.c.deleted),
Index('networks_uuid_project_id_deleted_idx',
networks.c.uuid, networks.c.project_id, networks.c.deleted),
Index('networks_vlan_deleted_idx', networks.c.vlan,
networks.c.deleted),
# project_user_quotas
Index('project_user_quotas_project_id_deleted_idx',
project_user_quotas.c.project_id,
project_user_quotas.c.deleted),
Index('project_user_quotas_user_id_deleted_idx',
project_user_quotas.c.user_id, project_user_quotas.c.deleted),
# reservations
Index('ix_reservations_project_id', reservations.c.project_id),
Index('ix_reservations_user_id_deleted',
reservations.c.user_id, reservations.c.deleted),
Index('reservations_uuid_idx', reservations.c.uuid),
# security_group_instance_association
Index('security_group_instance_association_instance_uuid_idx',
security_group_instance_association.c.instance_uuid),
# task_log
Index('ix_task_log_period_beginning', task_log.c.period_beginning),
Index('ix_task_log_host', task_log.c.host),
Index('ix_task_log_period_ending', task_log.c.period_ending),
# quota_classes
Index('ix_quota_classes_class_name', quota_classes.c.class_name),
# quota_usages
Index('ix_quota_usages_project_id', quota_usages.c.project_id),
Index('ix_quota_usages_user_id_deleted',
quota_usages.c.user_id, quota_usages.c.deleted),
# volumes
Index('volumes_instance_uuid_idx', volumes.c.instance_uuid),
]
# MySQL specific indexes
if migrate_engine.name == 'mysql':
for index in mysql_pre_indexes:
index.create(migrate_engine)
# mysql-specific index by leftmost 100 chars. (mysql gets angry if the
# index key length is too long.)
sql = ("create index migrations_by_host_nodes_and_status_idx ON "
"migrations (deleted, source_compute(100), dest_compute(100), "
"source_node(100), dest_node(100), status)")
migrate_engine.execute(sql)
# PostgreSQL specific indexes
if migrate_engine.name == 'postgresql':
Index('address', fixed_ips.c.address).create()
# NOTE(dprince): PostgreSQL doesn't allow duplicate indexes
# so we skip creation of select indexes (so schemas match exactly).
POSTGRES_INDEX_SKIPS = [
# See Havana migration 186_new_bdm_format where we dropped the
# virtual_name column.
# IceHouse fix is here: https://bugs.launchpad.net/nova/+bug/1265839
'block_device_mapping_instance_uuid_virtual_name_device_name_idx'
]
MYSQL_INDEX_SKIPS = [
# we create this one manually for MySQL above
'migrations_by_host_nodes_and_status_idx'
]
for index in common_indexes:
if ((migrate_engine.name == 'postgresql' and
index.name in POSTGRES_INDEX_SKIPS) or
(migrate_engine.name == 'mysql' and
index.name in MYSQL_INDEX_SKIPS)):
continue
else:
index.create(migrate_engine)
Index('project_id', dns_domains.c.project_id).drop
# Common foreign keys
fkeys = [
[[instance_type_projects.c.instance_type_id],
[instance_types.c.id],
'instance_type_projects_ibfk_1'],
[[iscsi_targets.c.volume_id],
[volumes.c.id],
'iscsi_targets_volume_id_fkey'],
[[reservations.c.usage_id],
[quota_usages.c.id],
'reservations_ibfk_1'],
[[security_group_instance_association.c.security_group_id],
[security_groups.c.id],
'security_group_instance_association_ibfk_1'],
[[compute_node_stats.c.compute_node_id],
[compute_nodes.c.id],
'fk_compute_node_stats_compute_node_id'],
[[compute_nodes.c.service_id],
[services.c.id],
'fk_compute_nodes_service_id'],
]
secgroup_instance_association_instance_uuid_fkey = (
'security_group_instance_association_instance_uuid_fkey')
fkeys.extend(
[
[[fixed_ips.c.instance_uuid],
[instances.c.uuid],
'fixed_ips_instance_uuid_fkey'],
[[block_device_mapping.c.instance_uuid],
[instances.c.uuid],
'block_device_mapping_instance_uuid_fkey'],
[[instance_info_caches.c.instance_uuid],
[instances.c.uuid],
'instance_info_caches_instance_uuid_fkey'],
[[instance_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_metadata_instance_uuid_fkey'],
[[instance_system_metadata.c.instance_uuid],
[instances.c.uuid],
'instance_system_metadata_ibfk_1'],
[[security_group_instance_association.c.instance_uuid],
[instances.c.uuid],
secgroup_instance_association_instance_uuid_fkey],
[[virtual_interfaces.c.instance_uuid],
[instances.c.uuid],
'virtual_interfaces_instance_uuid_fkey'],
[[instance_actions.c.instance_uuid],
[instances.c.uuid],
'fk_instance_actions_instance_uuid'],
[[instance_faults.c.instance_uuid],
[instances.c.uuid],
'fk_instance_faults_instance_uuid'],
[[migrations.c.instance_uuid],
[instances.c.uuid],
'fk_migrations_instance_uuid']
])
for fkey_pair in fkeys:
if migrate_engine.name in ('mysql'):
# For MySQL we name our fkeys explicitly
# so they match Havana
fkey = ForeignKeyConstraint(columns=fkey_pair[0],
refcolumns=fkey_pair[1],
name=fkey_pair[2])
fkey.create()
elif migrate_engine.name == 'postgresql':
# PostgreSQL names things like it wants (correct and compatible!)
fkey = ForeignKeyConstraint(columns=fkey_pair[0],
refcolumns=fkey_pair[1])
fkey.create()
if migrate_engine.name == 'mysql':
# In Folsom we explicitly converted migrate_version to UTF8.
migrate_engine.execute(
'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8')
# Set default DB charset to UTF8.
migrate_engine.execute(
'ALTER DATABASE %s DEFAULT CHARACTER SET utf8' %
migrate_engine.url.database)
_create_shadow_tables(migrate_engine)
_create_dump_tables(migrate_engine)<|fim▁end|>
|
key_pairs = Table('key_pairs', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime),
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.contrib import admin
from models import FileMapping
# Register your models here.
<|fim▁hole|>admin.site.register(FileMapping)<|fim▁end|>
| |
<|file_name|>FraudDetectionController.java<|end_file_name|><|fim▁begin|>package com.example.fraud;
import static org.springframework.web.bind.annotation.RequestMethod.PUT;
import java.math.BigDecimal;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.example.fraud.model.FraudCheck;
import com.example.fraud.model.FraudCheckResult;
import com.example.fraud.model.FraudCheckStatus;
@RestController
public class FraudDetectionController {
private static final String FRAUD_SERVICE_JSON_VERSION_1 = "application/vnd.fraud.v1+json";
private static final String NO_REASON = null;
private static final String AMOUNT_TOO_HIGH = "Amount too high";
private static final BigDecimal MAX_AMOUNT = new BigDecimal("5000");
@RequestMapping(
value = "/fraudcheck",
method = PUT,
consumes = FRAUD_SERVICE_JSON_VERSION_1,
produces = FRAUD_SERVICE_JSON_VERSION_1)
public FraudCheckResult fraudCheck(@RequestBody FraudCheck fraudCheck) {
if (amountGreaterThanThreshold(fraudCheck)) {
return new FraudCheckResult(FraudCheckStatus.FRAUD, AMOUNT_TOO_HIGH);
}<|fim▁hole|> private boolean amountGreaterThanThreshold(FraudCheck fraudCheck) {
return MAX_AMOUNT.compareTo(fraudCheck.getLoanAmount()) < 0;
}
}<|fim▁end|>
|
return new FraudCheckResult(FraudCheckStatus.OK, NO_REASON);
}
|
<|file_name|>shell_tests_main.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/command_line.h"
#include "base/test/launcher/test_launcher.h"<|fim▁hole|>#include "build/build_config.h"
#include "extensions/shell/test/shell_test_launcher_delegate.h"
#include "testing/gtest/include/gtest/gtest.h"
#if defined(OS_WIN)
#include "base/win/win_util.h"
#endif // defined(OS_WIN)
int main(int argc, char** argv) {
base::CommandLine::Init(argc, argv);
size_t parallel_jobs = base::NumParallelJobs(/*cores_per_job=*/2);
if (parallel_jobs == 0U)
return 1;
#if defined(OS_WIN)
// Load and pin user32.dll to avoid having to load it once tests start while
// on the main thread loop where blocking calls are disallowed.
base::win::PinUser32();
#endif // OS_WIN
extensions::AppShellTestLauncherDelegate launcher_delegate;
return content::LaunchTests(&launcher_delegate, parallel_jobs, argc, argv);
}<|fim▁end|>
| |
<|file_name|>windows_registry.py<|end_file_name|><|fim▁begin|>#!./env/bin/python
""" Windows Registry Network Query
Lists the network name and MAC addresses of the networks that
this computer has connected to. If the location command is given
print the coordinates of the network if they are in the wigile
datebase
Don't be a moron, please don't use this for something illegal.
Usage:
windows_registry.py
windows_registry.py location <username> <password>
windows_registry.py -h | --help
windows_registry.py --version
Options:
-h, --help Display this message<|fim▁hole|>
import mechanize
import urllib
import re
from _winreg import OpenKey, EnumKey, EnumValue, HKEY_LOCAL_MACHINE, CloseKey
from docopt import docopt
def binary2mac(binary):
address = ""
for char in binary:
address += ("%02x " % ord(char))
address = address.strip(" ").replace(" ", ":")[0:17]
return address
def wigle_print(username, password, netid):
browser = mechanize.Browser()
browser.open('http://wigle.net')
reqData = urllib.urlencode({'credential_0': username,
'credential_1': password})
browser.open('https://wigle.net//gps/gps/main/login', reqData)
params = {}
params['netid'] = netid
reqParams = urllib.urlencode(params)
respURL = 'http://wigle.net/gps/gps/main/confirmquery/'
resp = browser.open(respURL, reqParams).read()
mapLat = 'N/A'
mapLon = 'N/A'
rLat = re.findall(r'maplat=.*\&', resp)
if rLat:
mapLat = rLat[0].split('&')[0].split('=')[1]
rLon = re.findall(r'maplon=.*\&', resp)
if rLon:
mapLon = rLon[0].split
print '[-] Lat: ' + mapLat + ', Lon: ' + mapLon
def print_networks(username=None, password=None):
net = "SOFTWARE\Microsoft\Windows NT\CurrentVersion\NetworkList\Signatures\Unmanaged"
key = OpenKey(HKEY_LOCAL_MACHINE, net)
print '\n[*] Networks You have Joined.'
for i in range(100):
try:
guid = EnumKey(key, i)
netKey = OpenKey(key, str(guid))
(n, addr, t) = EnumValue(netKey, 5)
(n, name, t) = EnumValue(netKey, 4)
mac = binary2mac(addr)
net_name = str(name)
print '[+] ' + net_name + ' ' + mac
wigle_print(username, password, mac)
CloseKey(netKey)
except:
break
def main():
arguments = docopt(__doc__, version=0.1)
if arguments['location']:
print_networks(username=arguments['username'], password=arguments['password'])
else:
print_networks()
if __name__ == '__main__':
main()<|fim▁end|>
|
--version Display the version of this program
"""
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use core::primitives::*;
use core::io::*;
use operations::scope::ScopeStack;
/// Populates the given scope with all declarations that
/// should be available in every module at the top level
/// (known as the Prelude)
pub fn populate_scope(scope: &mut ScopeStack) {
// Taking advantage of the scope system to simulate modules
// This will be replaced with something better in:
// https://github.com/brain-lang/brain/issues/37
scope.push_scope();
array::define_array(scope);
let bool_type = boolean::define_boolean(scope);
let u8_type = u8::define_u8(scope, bool_type);
stdio::define_stdin(scope, u8_type);
stdio::define_stdout(scope);<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var app = app || {};
$(function() { //when DOM is ready...
if ( app.user_session ) {
console.log( "%csession" , "font-size: 2em; color: rgba(100,100,200,1.0);" );
} else {
console.log( "%cno session" , "font-size: 2em; color: rgba(200,100,100,1.0);" );
}
app.profile_content = new ProfileCollection();
app.project_content = new ProjectCollection();
app.router = new App.Router({
user_session: app.user_session ,
logged_user: app.logged_user,
logged_user_key: app.logged_user_key,
profiles: app.profile_content ,
projects: app.project_content
});
//This sets up routes with hashtags. Like /#blog , /#products , /#users
Backbone.history.start();
//This sets up routes with hashtags. Like /blog , /products , /users
//Backbone.history.start({ pushState: true });<|fim▁hole|><|fim▁end|>
|
});
|
<|file_name|>term_query.rs<|end_file_name|><|fim▁begin|>use super::term_weight::TermWeight;
use crate::query::bm25::Bm25Weight;
use crate::query::Weight;
use crate::query::{Explanation, Query};
use crate::schema::IndexRecordOption;
use crate::Searcher;
use crate::Term;
use std::collections::BTreeMap;
use std::fmt;
/// A Term query matches all of the documents
/// containing a specific term.
///
/// The score associated is defined as
/// `idf` * sqrt(`term_freq` / `field norm`)
/// in which :
/// * `idf` - inverse document frequency.
/// * `term_freq` - number of occurrences of the term in the field
/// * `field norm` - number of tokens in the field.
///
/// ```rust
/// use tantivy::collector::{Count, TopDocs};
/// use tantivy::query::TermQuery;
/// use tantivy::schema::{Schema, TEXT, IndexRecordOption};
/// use tantivy::{doc, Index, Term};
/// # fn test() -> tantivy::Result<()> {
/// let mut schema_builder = Schema::builder();
/// let title = schema_builder.add_text_field("title", TEXT);
/// let schema = schema_builder.build();
/// let index = Index::create_in_ram(schema);
/// {
/// let mut index_writer = index.writer(3_000_000)?;
/// index_writer.add_document(doc!(
/// title => "The Name of the Wind",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "The Diary of Muadib",
/// ))?;
/// index_writer.add_document(doc!(
/// title => "A Dairy Cow",
/// ))?;
/// index_writer.add_document(doc!(<|fim▁hole|>/// index_writer.commit()?;
/// }
/// let reader = index.reader()?;
/// let searcher = reader.searcher();
/// let query = TermQuery::new(
/// Term::from_field_text(title, "diary"),
/// IndexRecordOption::Basic,
/// );
/// let (top_docs, count) = searcher.search(&query, &(TopDocs::with_limit(2), Count))?;
/// assert_eq!(count, 2);
/// Ok(())
/// # }
/// # assert!(test().is_ok());
/// ```
#[derive(Clone)]
pub struct TermQuery {
term: Term,
index_record_option: IndexRecordOption,
}
impl fmt::Debug for TermQuery {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TermQuery({:?})", self.term)
}
}
impl TermQuery {
/// Creates a new term query.
pub fn new(term: Term, segment_postings_options: IndexRecordOption) -> TermQuery {
TermQuery {
term,
index_record_option: segment_postings_options,
}
}
/// The `Term` this query is built out of.
pub fn term(&self) -> &Term {
&self.term
}
/// Returns a weight object.
///
/// While `.weight(...)` returns a boxed trait object,
/// this method return a specific implementation.
/// This is useful for optimization purpose.
pub fn specialized_weight(
&self,
searcher: &Searcher,
scoring_enabled: bool,
) -> crate::Result<TermWeight> {
let field_entry = searcher.schema().get_field_entry(self.term.field());
if !field_entry.is_indexed() {
let error_msg = format!("Field {:?} is not indexed.", field_entry.name());
return Err(crate::TantivyError::SchemaError(error_msg));
}
let bm25_weight = if scoring_enabled {
Bm25Weight::for_terms(searcher, &[self.term.clone()])?
} else {
Bm25Weight::new(Explanation::new("<no score>".to_string(), 1.0f32), 1.0f32)
};
let index_record_option = if scoring_enabled {
self.index_record_option
} else {
IndexRecordOption::Basic
};
Ok(TermWeight::new(
self.term.clone(),
index_record_option,
bm25_weight,
scoring_enabled,
))
}
}
impl Query for TermQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
Ok(Box::new(
self.specialized_weight(searcher, scoring_enabled)?,
))
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
terms.insert(self.term.clone(), false);
}
}<|fim▁end|>
|
/// title => "The Diary of a Young Girl",
/// ))?;
|
<|file_name|>HTMLProgressElement.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include "config.h"
#include "HTMLProgressElement.h"
#include "ElementIterator.h"
#include "EventNames.h"
#include "ExceptionCode.h"
#include "HTMLNames.h"
#include "HTMLParserIdioms.h"
#include "ProgressShadowElement.h"
#include "RenderProgress.h"
#include "ShadowRoot.h"
namespace WebCore {
using namespace HTMLNames;
const double HTMLProgressElement::IndeterminatePosition = -1;
const double HTMLProgressElement::InvalidPosition = -2;
HTMLProgressElement::HTMLProgressElement(const QualifiedName& tagName, Document& document)
: LabelableElement(tagName, document)
, m_value(0)
{
ASSERT(hasTagName(progressTag));
setHasCustomStyleResolveCallbacks();
}
HTMLProgressElement::~HTMLProgressElement()
{
}
Ref<HTMLProgressElement> HTMLProgressElement::create(const QualifiedName& tagName, Document& document)
{
Ref<HTMLProgressElement> progress = adoptRef(*new HTMLProgressElement(tagName, document));
progress->ensureUserAgentShadowRoot();
return progress;
}
RenderPtr<RenderElement> HTMLProgressElement::createElementRenderer(Ref<RenderStyle>&& style, const RenderTreePosition&)
{
if (!style.get().hasAppearance())
return RenderElement::createFor(*this, WTFMove(style));
return createRenderer<RenderProgress>(*this, WTFMove(style));
}
bool HTMLProgressElement::childShouldCreateRenderer(const Node& child) const
{
return hasShadowRootParent(child) && HTMLElement::childShouldCreateRenderer(child);
}
RenderProgress* HTMLProgressElement::renderProgress() const
{
if (is<RenderProgress>(renderer()))
return downcast<RenderProgress>(renderer());
return downcast<RenderProgress>(descendantsOfType<Element>(*userAgentShadowRoot()).first()->renderer());
}
void HTMLProgressElement::parseAttribute(const QualifiedName& name, const AtomicString& value)
{
if (name == valueAttr)
didElementStateChange();
else if (name == maxAttr)
didElementStateChange();
else
LabelableElement::parseAttribute(name, value);
}
void HTMLProgressElement::didAttachRenderers()
{
if (RenderProgress* render = renderProgress())
render->updateFromElement();
}
double HTMLProgressElement::value() const
{
double value = parseToDoubleForNumberType(fastGetAttribute(valueAttr));
return !std::isfinite(value) || value < 0 ? 0 : std::min(value, max());
}
void HTMLProgressElement::setValue(double value, ExceptionCode& ec)
{
if (!std::isfinite(value)) {
ec = NOT_SUPPORTED_ERR;
return;
}
setAttribute(valueAttr, AtomicString::number(value >= 0 ? value : 0));
}
double HTMLProgressElement::max() const
{
double max = parseToDoubleForNumberType(fastGetAttribute(maxAttr));
return !std::isfinite(max) || max <= 0 ? 1 : max;
}
void HTMLProgressElement::setMax(double max, ExceptionCode& ec)
{
if (!std::isfinite(max)) {
ec = NOT_SUPPORTED_ERR;
return;
}
setAttribute(maxAttr, AtomicString::number(max > 0 ? max : 1));
}
double HTMLProgressElement::position() const
{
if (!isDeterminate())
return HTMLProgressElement::IndeterminatePosition;
return value() / max();
}
bool HTMLProgressElement::isDeterminate() const
{
return fastHasAttribute(valueAttr);
}
void HTMLProgressElement::didElementStateChange()
{
m_value->setWidthPercentage(position() * 100);<|fim▁hole|> if (RenderProgress* render = renderProgress()) {
bool wasDeterminate = render->isDeterminate();
render->updateFromElement();
if (wasDeterminate != isDeterminate())
setNeedsStyleRecalc();
}
}
void HTMLProgressElement::didAddUserAgentShadowRoot(ShadowRoot* root)
{
ASSERT(!m_value);
Ref<ProgressInnerElement> inner = ProgressInnerElement::create(document());
root->appendChild(inner.copyRef());
Ref<ProgressBarElement> bar = ProgressBarElement::create(document());
Ref<ProgressValueElement> value = ProgressValueElement::create(document());
m_value = value.ptr();
m_value->setWidthPercentage(HTMLProgressElement::IndeterminatePosition * 100);
bar->appendChild(*m_value, ASSERT_NO_EXCEPTION);
inner->appendChild(WTFMove(bar), ASSERT_NO_EXCEPTION);
}
bool HTMLProgressElement::shouldAppearIndeterminate() const
{
return !isDeterminate();
}
} // namespace<|fim▁end|>
| |
<|file_name|>_keyToPairIn.js<|end_file_name|><|fim▁begin|>import _curry2 from "./_curry2";
/**
* Accepts an object and build a function expecting a key to create a "pair" with the key
* and its value.
* @private
* @function
* @param {Object} obj
* @returns {Function}
*/
var _keyToPairIn = _curry2(function (obj, key) {<|fim▁hole|> return [key, obj[key]];
});
export default _keyToPairIn;<|fim▁end|>
| |
<|file_name|>DetachInstancesResponse.java<|end_file_name|><|fim▁begin|>package com.aliyun.api.ess.ess20140828.response;
import com.aliyun.api.AliyunResponse;
import com.taobao.api.internal.mapping.ApiField;
/**
* TOP API: ess.aliyuncs.com.DetachInstances.2014-08-28 response.
*
* @author auto create
* @since 1.0, null
*/
public class DetachInstancesResponse extends AliyunResponse {
private static final long serialVersionUID = 3317111479582962569L;
<|fim▁hole|> */
@ApiField("RequestId")
private String requestId;
/**
* 伸缩活动id
*/
@ApiField("ScalingActivityId")
private String scalingActivityId;
public String getRequestId() {
return this.requestId;
}
public String getScalingActivityId() {
return this.scalingActivityId;
}
public void setRequestId(String requestId) {
this.requestId = requestId;
}
public void setScalingActivityId(String scalingActivityId) {
this.scalingActivityId = scalingActivityId;
}
}<|fim▁end|>
|
/**
* 1
|
<|file_name|>timeStub.cpp<|end_file_name|><|fim▁begin|>#include "hal/time/time.hpp"
namespace hal
{
namespace time
{
static u64 currentTime = 0;
u64 milliseconds()
{
return currentTime;
}
} // namespace time
} // namespace hal
namespace stub
{
namespace time
{
void setCurrentTime(u64 milliseconds)
{
hal::time::currentTime = milliseconds;
}
<|fim▁hole|> hal::time::currentTime += milliseconds;
}
} // namespace time
} // namespace stub<|fim▁end|>
|
void forwardTime(u64 milliseconds)
{
|
<|file_name|>DP - Subset Sum.cpp<|end_file_name|><|fim▁begin|>//Subset-Sum -> (G = O valor total sendo testado, N = numero de valores disponiveis no array 'values'<|fim▁hole|> sub[0] = 1;
for(j = 0; j < n; j++) if(values[j] != g) {
for(int k = g; k >= values[j]; k--) {
sub[k] |= sub[k - values[j]];
}
}
return sub[g];
}<|fim▁end|>
|
int values[n];
bool subsetSum(int n, int g) {
for(j = 0; j <= g; j++) sub[j] = 0;
|
<|file_name|>SipAddressDataItem.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.contacts.model.dataitem;
import android.content.ContentValues;
import android.provider.ContactsContract;
import android.provider.ContactsContract.CommonDataKinds.SipAddress;
import com.android.contacts.model.RawContact;
/**
* Represents a sip address data item, wrapping the columns in
* {@link ContactsContract.CommonDataKinds.SipAddress}.
*/
public class SipAddressDataItem extends DataItem {
/* package */ SipAddressDataItem(RawContact rawContact, ContentValues values) {
super(rawContact, values);<|fim▁hole|> }
public String getSipAddress() {
return getContentValues().getAsString(SipAddress.SIP_ADDRESS);
}
/**
* Value is one of SipAddress.TYPE_*
*/
public int getType() {
return getContentValues().getAsInteger(SipAddress.TYPE);
}
public String getLabel() {
return getContentValues().getAsString(SipAddress.LABEL);
}
}<|fim▁end|>
| |
<|file_name|>data_utils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import os
import re
import numpy as np
import tensorflow as tf
stop_words=set(["a","an","the"])
def load_candidates(data_dir, task_id):
assert task_id > 0 and task_id < 6
candidates=[]
candidates_f=None
candid_dic={}
#candidates_f='candidates.txt'
candidates_f='candidates' + str(task_id) + '.txt'
with open(os.path.join(data_dir,candidates_f)) as f:
for i,line in enumerate(f):
candid_dic[line.strip().split(' ',1)[1]] = i
line=tokenize(line.strip())[1:]
candidates.append(line)
# return candidates,dict((' '.join(cand),i) for i,cand in enumerate(candidates))
return candidates,candid_dic
def load_test_candidates(data_dir, task_id, test_id):
assert task_id > 0 and task_id < 6
candidates=[]
candidates_f=None
candid_dic={}
'''
if test_id == 1 or test_id == 2:
candidates_f='candidates.txt'
else:
candidates_f='candidates-ext.txt'
'''
if test_id == 1 or test_id == 2:
candidates_f='candidates' + str(task_id) + '.txt'
else:
candidates_f='candidates' + str(task_id) + '_tst'+ str(test_id) + '.txt'
with open(os.path.join(data_dir,candidates_f)) as f:
for i,line in enumerate(f):
candid_dic[line.strip().split(' ',1)[1]] = i
line=tokenize(line.strip())[1:]
candidates.append(line)
# return candidates,dict((' '.join(cand),i) for i,cand in enumerate(candidates))
return candidates,candid_dic
def load_dialog_task(data_dir, task_id, candid_dic, isOOV):
'''Load the nth task. There are 20 tasks in total.
Returns a tuple containing the training and testing data for the task.
'''
assert task_id > 0 and task_id < 6
files = os.listdir(data_dir)
files = [os.path.join(data_dir, f) for f in files]
s = '-dialog-task{}'.format(task_id)
train_file = [f for f in files if s in f and 'train' in f][0]
test_file = [f for f in files if s in f and 'dev' in f][0]
val_file = [f for f in files if s in f and 'dev' in f][0]
train_data = get_dialogs(train_file,candid_dic)
test_data = get_dialogs(test_file,candid_dic)
val_data = get_dialogs(val_file,candid_dic)
return train_data, test_data, val_data
def tokenize(sent):
'''Return the tokens of a sentence including punctuation.
>>> tokenize('Bob dropped the apple. Where is the apple?')
['Bob', 'dropped', 'the', 'apple', '.', 'Where', 'is', 'the', 'apple']
'''
sent=sent.lower()
if sent=='<silence>':
return [sent]
result=[x.strip() for x in re.split('(\W+)?', sent) if x.strip() and x.strip() not in stop_words]
if not result:
result=['<silence>']
if result[-1]=='.' or result[-1]=='?' or result[-1]=='!':
result=result[:-1]
return result
def load_dialog_test_data(data_dir, task_id, test_id):
assert task_id > 0 and task_id < 6
files = os.listdir(data_dir)
files = [os.path.join(data_dir, f) for f in files]
s = '-dialog-task{}'.format(task_id)
t = 'tst_' + str(test_id)
test_file = [f for f in files if s in f and t in f][0]
test_data = get_test_dialogs(test_file)
return test_data
def get_test_dialogs(f):
'''Given a file name, read the file, retrieve the dialogs, and then convert the sentences into a single dialog.
If max_length is supplied, any stories longer than max_length tokens will be discarded.
'''
with open(f) as f:
return parse_test_dialogs(f.readlines())
def parse_test_dialogs(lines):
'''
Parse dialogs provided in the babi tasks format
'''
data=[]
context=[]
u=None
r=None
a=-1
dialog_id=0
for line in lines:
line=line.strip()
if line:
nid, line = line.split(' ', 1)
nid = int(nid)
if '\t' in line:
u, r = line.split('\t')
u = tokenize(u)
r = tokenize(r)
# temporal encoding, and utterance/response encoding
# data.append((context[:],u[:],candid_dic[' '.join(r)]))
# data.append((context[:],u[:],a,dialog_id))
u.append('$u')
u.append('#'+str(nid))
r.append('$r')
r.append('#'+str(nid))
context.append(u)
context.append(r)
else:
r=tokenize(line)
r.append('$r')
r.append('#'+str(nid))
context.append(r)
else:
data.append((context[:-2],u[:],a,dialog_id))
# clear context
u=None
r=None
a=None
context=[]
dialog_id=dialog_id+1
return data
def parse_dialogs_per_response(lines,candid_dic):
'''
Parse dialogs provided in the babi tasks format
'''
data=[]
context=[]
u=None
r=None
dialog_id=0
for line in lines:
line=line.strip()
if line:
nid, line = line.split(' ', 1)
nid = int(nid)
if '\t' in line:
u, r = line.split('\t')
a = candid_dic[r]
u = tokenize(u)
r = tokenize(r)
# temporal encoding, and utterance/response encoding
# data.append((context[:],u[:],candid_dic[' '.join(r)]))
data.append((context[:],u[:],a,dialog_id))
u.append('$u')
u.append('#'+str(nid))
r.append('$r')
r.append('#'+str(nid))
context.append(u)
context.append(r)
else:
r=tokenize(line)
r.append('$r')
r.append('#'+str(nid))
context.append(r)
else:
dialog_id=dialog_id+1
# clear context
context=[]
return data
def get_dialogs(f,candid_dic):
'''Given a file name, read the file, retrieve the dialogs, and then convert the sentences into a single dialog.
If max_length is supplied, any stories longer than max_length tokens will be discarded.
'''
with open(f) as f:
return parse_dialogs_per_response(f.readlines(),candid_dic)
def vectorize_candidates_sparse(candidates,word_idx):
shape=(len(candidates),len(word_idx)+1)
indices=[]
values=[]
for i,candidate in enumerate(candidates):
for w in candidate:
indices.append([i,word_idx[w]])
values.append(1.0)
return tf.SparseTensor(indices,values,shape)
def vectorize_candidates(candidates,word_idx,sentence_size):
shape=(len(candidates),sentence_size)
C=[]
for i,candidate in enumerate(candidates):
lc=max(0,sentence_size-len(candidate))
C.append([word_idx[w] if w in word_idx else 0 for w in candidate] + [0] * lc)
return tf.constant(C,shape=shape)
def vectorize_data(data, word_idx, sentence_size, batch_size, candidates_size, max_memory_size, candidates, match_feature_flag):
"""
Vectorize stories and queries.
If a sentence length < sentence_size, the sentence will be padded with 0's.
If a story length < memory_size, the story will be padded with empty memories.
Empty memories are 1-D arrays of length sentence_size filled with 0's.
The answer array is returned as a one-hot encoding.
"""
atmosphere_restriction_set={'casual','romantic','business','glutenfree','vegan','vegetarian'}
S = []
Q = []
A = []
C = []
data.sort(key=lambda x:len(x[0]),reverse=True)
for i, (story, query, answer, start) in enumerate(data):
if i%batch_size==0:
memory_size=max(1,min(max_memory_size,len(story)))
ss = []
story_query_vocab = set()
for i, sentence in enumerate(story, 1):
ls = max(0, sentence_size - len(sentence))
ss.append([word_idx[w] if w in word_idx else 0 for w in sentence] + [0] * ls)
for w in sentence:
story_query_vocab.add(w)
# take only the most recent sentences that fit in memory
ss = ss[::-1][:memory_size][::-1]
# pad to memory_size
lm = max(0, memory_size - len(ss))
for _ in range(lm):
ss.append([0] * sentence_size)
lq = max(0, sentence_size - len(query))
q = [word_idx[w] if w in word_idx else 0 for w in query] + [0] * lq
for w in query:<|fim▁hole|>
c = []
for j,candidate in enumerate(candidates):
candidate_vocab = set()
for w in candidate:
candidate_vocab.add(w)
candidate_vocab = candidate_vocab.intersection(atmosphere_restriction_set)
extra_feature_len=0
match_feature=[]
if candidate_vocab <= story_query_vocab and len(candidate_vocab) > 0 and match_feature_flag:
extra_feature_len=1
match_feature.append(word_idx['MATCH_ATMOSPHERE_RESTRICTION'])
lc=max(0,sentence_size-len(candidate)-extra_feature_len)
c.append([word_idx[w] if w in word_idx else 0 for w in candidate] + [0] * lc + match_feature)
S.append(np.array(ss))
Q.append(np.array(q))
A.append(np.array(answer))
C.append(np.array(c))
return S, Q, A, C
def vectorize_data_with_surface_form(data, word_idx, sentence_size, batch_size, candidates_size, max_memory_size, candidates, match_feature_flag):
"""
Vectorize stories and queries.
If a sentence length < sentence_size, the sentence will be padded with 0's.
If a story length < memory_size, the story will be padded with empty memories.
Empty memories are 1-D arrays of length sentence_size filled with 0's.
The answer array is returned as a one-hot encoding.
"""
atmosphere_restriction_set={'casual','romantic','business','glutenfree','vegan','vegetarian'}
S = []
Q = []
A = []
C = []
S_in_readable_form = []
Q_in_readable_form = []
dialogIDs = []
last_db_results = []
data.sort(key=lambda x:len(x[0]),reverse=True)
for i, (story, query, answer, dialog_id) in enumerate(data):
if i%batch_size==0:
memory_size=max(1,min(max_memory_size,len(story)))
ss = []
story_string = []
story_query_vocab = set()
dbentries =set([])
dbEntriesRead=False
last_db_result=""
for i, sentence in enumerate(story, 1):
ls = max(0, sentence_size - len(sentence))
ss.append([word_idx[w] if w in word_idx else 0 for w in sentence] + [0] * ls)
for w in sentence:
story_query_vocab.add(w)
story_element = ' '.join([str(x) for x in sentence[:-2]])
# if the story element is a database response/result
if 'r_' in story_element and 'api_call' not in story_element:
dbEntriesRead = True
if 'r_rating' in story_element:
dbentries.add( sentence[0] + '(' + sentence[2] + ')')
else:
if dbEntriesRead:
#story_string.append('$db : ' + ' '.join([str(x) for x in dbentries]))
last_db_result = '$db : ' + ' '.join([str(x) for x in dbentries])
dbentries =set([])
dbEntriesRead = False
#story_string.append(' '.join([str(x) for x in sentence[-2:]]) + ' : ' + story_element)
story_string.append(' '.join([str(x) for x in sentence[-2:]]) + ' : ' + story_element)
# take only the most recent sentences that fit in memory
ss = ss[::-1][:memory_size][::-1]
# pad to memory_size
lm = max(0, memory_size - len(ss))
for _ in range(lm):
ss.append([0] * sentence_size)
lq = max(0, sentence_size - len(query))
q = [word_idx[w] if w in word_idx else 0 for w in query] + [0] * lq
for w in query:
story_query_vocab.add(w)
story_query_vocab = story_query_vocab.intersection(atmosphere_restriction_set)
c = []
for j,candidate in enumerate(candidates):
candidate_vocab = set()
for w in candidate:
candidate_vocab.add(w)
candidate_vocab = candidate_vocab.intersection(atmosphere_restriction_set)
extra_feature_len=0
match_feature=[]
if candidate_vocab == story_query_vocab and len(candidate_vocab) > 0 and match_feature_flag:
extra_feature_len=1
match_feature.append(word_idx['MATCH_ATMOSPHERE_RESTRICTION'])
lc=max(0,sentence_size-len(candidate)-extra_feature_len)
c.append([word_idx[w] if w in word_idx else 0 for w in candidate] + [0] * lc + match_feature)
S.append(np.array(ss))
Q.append(np.array(q))
A.append(np.array(answer))
C.append(np.array(c))
S_in_readable_form.append(story_string)
Q_in_readable_form.append(' '.join([str(x) for x in query]))
last_db_results.append(last_db_result)
dialogIDs.append(dialog_id)
return S, Q, A, C, S_in_readable_form, Q_in_readable_form, last_db_results, dialogIDs
def restaurant_reco_evluation(test_preds, testA, indx2candid):
total = 0
match = 0
for idx, val in enumerate(test_preds):
answer = indx2candid[testA[idx].item(0)]
prediction = indx2candid[val]
if "what do you think of this option:" in prediction:
total = total+1
if prediction == answer:
match=match+1
print('Restaurant Recommendation Accuracy : ' + str(match/float(total)) + " (" + str(match) + "/" + str(total) + ")")
if __name__ == '__main__':
u = tokenize('The phone number of taj_tandoori is taj_tandoori_phone')
print(u)<|fim▁end|>
|
story_query_vocab.add(w)
story_query_vocab = story_query_vocab.intersection(atmosphere_restriction_set)
|
<|file_name|>test_shutdown.rs<|end_file_name|><|fim▁begin|>use std::thread::{self};
use std::time::{Duration};
use umio::{ELoopBuilder};
use {MockDispatcher, MockMessage};
#[test]
fn positive_execute_shutdown() {
let eloop_addr = "127.0.0.1:0".parse().unwrap();
let mut eloop = ELoopBuilder::new()
.bind_address(eloop_addr)
.build().unwrap();
let (dispatcher, _) = MockDispatcher::new();
let dispatch_send = eloop.channel();
thread::spawn(move || {
eloop.run(dispatcher).unwrap();<|fim▁hole|> thread::sleep(Duration::from_millis(50));
dispatch_send.send(MockMessage::Shutdown).unwrap();
thread::sleep(Duration::from_millis(50));
assert!(dispatch_send.send(MockMessage::SendNotify).is_err());
}<|fim▁end|>
|
});
|
<|file_name|>resolve_dns_test.go<|end_file_name|><|fim▁begin|>package uri
import (
"testing"
"github.com/telehash/gogotelehash/Godeps/_workspace/src/github.com/stretchr/testify/assert"
_ "github.com/telehash/gogotelehash/e3x"
)
func Test_resolveDNS(t *testing.T) {
// When using boot2docker run:
// boot2docker stop
// VBoxManage modifyvm "boot2docker-vm" --natdnshostresolver1 off
// VBoxManage modifyvm "boot2docker-vm" --natdnsproxy1 on
// boot2docker start
assert := assert.New(t)
uri, err := Parse("01.test.simonmenke.me")
if err != nil {<|fim▁hole|> ident, err := resolveSRV(uri, "udp")
if assert.NoError(err) && assert.NotNil(ident) {
t.Logf("ident=%v addrs=%v keys=%v", ident, ident.Addresses(), ident.Keys())
}
}<|fim▁end|>
|
panic(err)
}
|
<|file_name|>usat.rs<|end_file_name|><|fim▁begin|>use crate::core::instruction::Instruction;
#[allow(non_snake_case)]
pub fn decode_USAT_t1(opcode: u32) -> Instruction {
Instruction::UDF {
imm32: 0,<|fim▁hole|> thumb32: true,
}
}<|fim▁end|>
|
opcode: opcode.into(),
|
<|file_name|>ban.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#![feature(plugin)]
#![plugin(script_plugins)]
extern crate js;
use js::jsval::JSVal;<|fim▁hole|>use std::cell::UnsafeCell;
struct Foo {
bar: Cell<JSVal>,
//~^ ERROR Banned type Cell<JSVal> detected. Use MutDom<JSVal> instead
foo: UnsafeCell<JSVal>
//~^ NOT AN ERROR
}
fn main() {}<|fim▁end|>
|
use std::cell::Cell;
|
<|file_name|>CustomLabel.java<|end_file_name|><|fim▁begin|>/*
* uiComponents.CustomLabel
*
*------------------------------------------------------------------------------
* Copyright (C) 2006-2008 University of Dundee. All rights reserved.
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*<|fim▁hole|> */
package org.openmicroscopy.shoola.agents.editor.uiComponents;
//Java imports
import javax.swing.Icon;
import javax.swing.JLabel;
//Third-party libraries
//Application-internal dependencies
/**
* A Custom Label, which should be used by the UI instead of using
* JLabel. Sets the font to CUSTOM FONT.
*
* This font is also used by many other Custom UI components in this
* package, making it easy to change the font in many components in
* one place (here!).
*
* @author William Moore
* <a href="mailto:[email protected]">[email protected]</a>
* @version 3.0
* <small>
* (<b>Internal version:</b> $Revision: $Date: $)
* </small>
* @since OME3.0
*/
public class CustomLabel
extends JLabel {
private int fontSize;
/**
* Simply delegates to JLabel superclass.
*/
public CustomLabel() {
super();
setFont();
}
/**
* Simply delegates to JLabel superclass.
*/
public CustomLabel(Icon image) {
super(image);
setFont();
}
/**
* Simply delegates to JLabel superclass.
*/
public CustomLabel(String text) {
super(text);
setFont();
}
/**
* Simply delegates to JLabel superclass.
*/
public CustomLabel(String text, int fontSize) {
super(text);
this.fontSize = fontSize;
setFont();
}
private void setFont()
{
if (fontSize == 0)
setFont(new CustomFont());
else {
setFont(CustomFont.getFontBySize(fontSize));
}
}
}<|fim▁end|>
|
*------------------------------------------------------------------------------
|
<|file_name|>DirectionalLightHelper.js<|end_file_name|><|fim▁begin|>/**
* @author alteredq / http://alteredqualia.com/
* @author mrdoob / http://mrdoob.com/
* @author WestLangley / http://github.com/WestLangley
*/
THREE.DirectionalLightHelper = function ( light, size ) {
THREE.Object3D.call( this );
this.light = light;
this.light.updateMatrixWorld();
this.matrixWorld = light.matrixWorld;
this.matrixAutoUpdate = false;
<|fim▁hole|> size = size || 1;
var geometry = new THREE.PlaneGeometry( size, size );
var material = new THREE.MeshBasicMaterial( { wireframe: true, fog: false } );
material.color.copy( this.light.color ).multiplyScalar( this.light.intensity );
this.lightPlane = new THREE.Mesh( geometry, material );
this.add( this.lightPlane );
geometry = new THREE.Geometry();
geometry.vertices.push( new THREE.Vector3() );
geometry.vertices.push( new THREE.Vector3() );
material = new THREE.LineBasicMaterial( { fog: false } );
material.color.copy( this.light.color ).multiplyScalar( this.light.intensity );
this.targetLine = new THREE.Line( geometry, material );
this.add( this.targetLine );
this.update();
};
THREE.DirectionalLightHelper.prototype = Object.create( THREE.Object3D.prototype );
THREE.DirectionalLightHelper.prototype.dispose = function () {
this.lightPlane.geometry.dispose();
this.lightPlane.material.dispose();
this.targetLine.geometry.dispose();
this.targetLine.material.dispose();
};
THREE.DirectionalLightHelper.prototype.update = function () {
var v1 = new THREE.Vector3();
var v2 = new THREE.Vector3();
var v3 = new THREE.Vector3();
return function () {
v1.setFromMatrixPosition( this.light.matrixWorld );
v2.setFromMatrixPosition( this.light.target.matrixWorld );
v3.subVectors( v2, v1 );
this.lightPlane.lookAt( v3 );
this.lightPlane.material.color.copy( this.light.color ).multiplyScalar( this.light.intensity );
this.targetLine.geometry.vertices[ 1 ].copy( v3 );
this.targetLine.geometry.verticesNeedUpdate = true;
this.targetLine.material.color.copy( this.lightPlane.material.color );
}
}();<|fim▁end|>
| |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "photoboard.settings")
application = get_wsgi_application()<|fim▁end|>
|
"""
WSGI config for photoboard project.
It exposes the WSGI callable as a module-level variable named ``application``.
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>SUCCESS_URL_KEY = 'twitter_success_url'
USERINFO_KEY = 'twitter_user_info'<|fim▁end|>
|
ACCESS_KEY = 'twitter_access_token'
REQUEST_KEY = 'twitter_request_token'
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod mod2a;
mod mod2b;
mod mymod1 {
use mod2a::{Bar, Foo};
mod mod3a;
}
#[path = "mod2c.rs"]
mod mymod2;
<|fim▁hole|><|fim▁end|>
|
mod submod2;
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Lexical analysis.
use std::str;
use std::fmt;
use kailua_diag::{Locale, Localize, Localized};
use string::{Name, Str};
/// A token.
#[derive(Clone, Debug, PartialEq)]
pub enum Tok {
/// A token which is distinct from all other tokens.
///
/// The lexer emits this token on an error.
Error,
/// A comment token. The parser should ignore this.
///
/// The shebang line (the first line starting with `#`) is also considered as a comment.
Comment,
/// A punctuation.
Punct(Punct),
/// A keyword.
Keyword(Keyword),
/// A number.
Num(f64),
/// A name (either an identifier or a quoted name in the meta block).
Name(Name),
/// A string (either `"string"` or `[[string]]`).
Str(Str),
/// The end of file.
///
/// A valid stream of tokens is expected to have only one EOF token at the end.
EOF,
}
impl Localize for Tok {
fn fmt_localized(&self, f: &mut fmt::Formatter, locale: Locale) -> fmt::Result {
match (&locale[..], self) {
("ko", &Tok::Error) => write!(f, "잘못된 문자"),
(_, &Tok::Error) => write!(f, "an invalid character"),
("ko", &Tok::Comment) => write!(f, "주석"),
(_, &Tok::Comment) => write!(f, "a comment"),
(_, &Tok::Punct(p)) => write!(f, "{}", Localized::new(&p, locale)),
(_, &Tok::Keyword(w)) => write!(f, "{}", Localized::new(&w, locale)),
("ko", &Tok::Num(_)) => write!(f, "숫자"),
(_, &Tok::Num(_)) => write!(f, "a number"),
("ko", &Tok::Name(_)) => write!(f, "이름"),
(_, &Tok::Name(_)) => write!(f, "a name"),
("ko", &Tok::Str(_)) => write!(f, "문자열 리터럴"),
(_, &Tok::Str(_)) => write!(f, "a string literal"),
("ko", &Tok::EOF) => write!(f, "파일의 끝"),
(_, &Tok::EOF) => write!(f, "the end of file"),
}
}
}
impl<'a> Localize for &'a Tok {
fn fmt_localized(&self, f: &mut fmt::Formatter, locale: Locale) -> fmt::Result {
(**self).fmt_localized(f, locale)
}
}
macro_rules! define_puncts {
($ty:ident |$locale:ident|: $($i:ident $t:expr, #[$m:meta])*) => (
/// A punctuation.
///
/// This includes Kailua-specific punctuations,
/// which are only generated in the meta block (marked as [M] below).
/// Some of them are also only generated after a particular Lua version
/// (marked as [5.x+] below).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum $ty { $(#[$m] $i,)* }
impl Localize for $ty {
fn fmt_localized(&self, f: &mut fmt::Formatter, $locale: Locale) -> fmt::Result {
let text = match *self { $($ty::$i => $t,)* };
fmt::Display::fmt(text, f)
}
}
);
}
define_puncts! { Punct |locale|:
Plus "`+`", /// `+`.
Dash "`-`", /// `-`.
Star "`*`", /// `*`.
Slash "`/`", /// `/`.
Percent "`%`", /// `%`.
Caret "`^`", /// `^`.
Hash "`#`", /// `#`.
EqEq "`==`", /// `==`.
TildeEq "`~=`", /// `~=`.
LtEq "`<=`", /// `<=`.
GtEq "`>=`", /// `>=`.
Lt "`<`", /// `<`.
Gt "`>`", /// `>`.
Eq "`=`", /// `=`.
Amp "`&`", /// `&`. [5.3+]
Tilde "`~`", /// `~`. [5.3+]
Pipe "`|`", /// `|`. [5.3+ or M]
LtLt "`<<`", /// `<<`. [5.3+]
GtGt "`>>`", /// `>>`. [5.3+]
SlashSlash "`//`", /// `//`. [5.3+]
LParen "`(`", /// `(`.
RParen "`)`", /// `)`.
LBrace "`{`", /// `{`.
RBrace "`}`", /// `}`.
LBracket "`[`", /// `[`.
RBracket "`]`", /// `]`.
Semicolon "`;`", /// `;`.
Colon "`:`", /// `:`.
ColonColon "`::`", /// `::`. [5.2+]
Comma "`,`", /// `,`.
Dot "`.`", /// `.`.
DotDot "`..`", /// `..`.
DotDotDot "`...`", /// `...`.
// Kailua extensions
DashDashHash "`--#`", /// `--#`. [M]
DashDashV "`--v`", /// `--v`. [M]
DashDashColon "`--:`", /// `--:`. [M]
DashDashGt "`-->`", /// `-->`. [M]
Ques "`?`", /// `?`. [M]
Bang "`!`", /// `!`. [M]
Newline match &locale[..] { "ko" => "개행문자", _ => "a newline" },
/// A newline. Only generated at the end of the meta block.
}
macro_rules! define_keywords {
($ty:ident: everywhere { $($i:ident $t:expr, #[$m:meta])* }
meta_only { $($mi:ident $mt:expr, #[$mm:meta])* }) => (
/// A keyword.
///
/// This includes Kailua-specific keywords,
/// which are only generated in the meta block (marked as [M] below).
/// Some of them are also only generated after a particular Lua version
/// (marked as [5.x+] below).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum $ty { $(#[$m] $i,)* $(#[$mm] $mi,)* }
impl $ty {
pub fn from(s: &[u8], in_meta: bool) -> Option<Keyword> {
match (in_meta, s) {
$((_, $t) => Some(Keyword::$i),)*
$((true, $mt) => Some(Keyword::$mi),)*
(_, _) => None,
}
}
pub fn name(&self) -> &'static [u8] {
match *self { $($ty::$i => $t,)* $($ty::$mi => $mt,)* }
}
}
);
}
define_keywords! { Keyword:
everywhere {
And b"and", /// `and`.
Break b"break", /// `break`.
Do b"do", /// `do`.
Else b"else", /// `else`.
Elseif b"elseif", /// `elseif`.
End b"end", /// `end`.
False b"false", /// `false`.
For b"for", /// `for`.
Function b"function", /// `function`.
Goto b"goto", /// `goto`. [5.2+; a normal identifier in Lua 5.1]
If b"if", /// `if`.
In b"in", /// `in`.
Local b"local", /// `local`.
Nil b"nil", /// `nil`.
Not b"not", /// `not`.
Or b"or", /// `or`.
Repeat b"repeat", /// `repeat`.
Return b"return", /// `return`.
Then b"then", /// `then`.
True b"true", /// `true`.
Until b"until", /// `until`.
While b"while", /// `while`.
}<|fim▁hole|> Class b"class", /// `class`. [M]
Const b"const", /// `const`. [M]
Global b"global", /// `global`. [M]
Map b"map", /// `map`. [M]
Method b"method", /// `method`. [M]
Module b"module", /// `module`. [M]
Once b"once", /// `once`. [M]
Open b"open", /// `open`. [M]
Static b"static", /// `static`. [M]
Type b"type", /// `type`. [M]
Var b"var", /// `var`. [M]
Vector b"vector", /// `vector`. [M]
}
}
impl From<Keyword> for Str {
fn from(kw: Keyword) -> Str {
kw.name().into()
}
}
impl From<Keyword> for Name {
fn from(kw: Keyword) -> Name {
kw.name().into()
}
}
impl Localize for Keyword {
fn fmt_localized(&self, f: &mut fmt::Formatter, locale: Locale) -> fmt::Result {
let name = str::from_utf8(self.name()).unwrap();
match &locale[..] {
"ko" => write!(f, "예약어 `{}`", name),
_ => write!(f, "a keyword `{}`", name),
}
}
}
mod lexer;
mod nesting;
pub use self::lexer::Lexer;
pub use self::nesting::{Nest, NestedToken, NestingCategory, NestingSerial};<|fim▁end|>
|
meta_only { // Kailua extensions
Assume b"assume", /// `assume`. [M]
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory<|fim▁hole|><|fim▁end|>
|
##############################################################################
from . import account_move
from . import account_move_line
from . import account_master_port
|
<|file_name|>action.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package state
import (
"time"
"github.com/juju/errors"
"github.com/juju/loggo"
"github.com/juju/names"
"github.com/juju/utils"
"gopkg.in/mgo.v2"
"gopkg.in/mgo.v2/bson"
"gopkg.in/mgo.v2/txn"
)
var actionLogger = loggo.GetLogger("juju.state.action")
// NewUUID wraps the utils.NewUUID() call, and exposes it as a var to
// facilitate patching.
var NewUUID = func() (utils.UUID, error) { return utils.NewUUID() }
// ActionStatus represents the possible end states for an action.
type ActionStatus string
const (
// ActionFailed signifies that the action did not complete successfully.
ActionFailed ActionStatus = "failed"
// ActionCompleted indicates that the action ran to completion as intended.
ActionCompleted ActionStatus = "completed"
// ActionCancelled means that the Action was cancelled before being run.
ActionCancelled ActionStatus = "cancelled"
// ActionPending is the default status when an Action is first queued.
ActionPending ActionStatus = "pending"
// ActionRunning indicates that the Action is currently running.
ActionRunning ActionStatus = "running"
)
const actionMarker string = "_a_"
type actionNotificationDoc struct {
// DocId is the composite _id that can be matched by an
// idPrefixWatcher that is configured to watch for the
// ActionReceiver Name() which makes up the first part of this
// composite _id.
DocId string `bson:"_id"`
// EnvUUID is the environment identifier.
EnvUUID string `bson:"env-uuid"`
// Receiver is the Name of the Unit or any other ActionReceiver for
// which this notification is queued.
Receiver string `bson:"receiver"`
// ActionID is the unique identifier for the Action this notification
// represents.
ActionID string `bson:"actionid"`
}
// actionDoc is the MongoDB document backing an Action: the request, its
// lifecycle timestamps, and (once finished) its results.
type actionDoc struct {
	// DocId is the key for this document; it is a UUID.
	DocId string `bson:"_id"`

	// EnvUUID is the environment identifier.
	EnvUUID string `bson:"env-uuid"`

	// Receiver is the Name of the Unit or any other ActionReceiver for
	// which this Action is queued.
	Receiver string `bson:"receiver"`

	// Name identifies the action that should be run; it should
	// match an action defined by the unit's charm.
	Name string `bson:"name"`

	// Parameters holds the action's parameters, if any; it should validate
	// against the schema defined by the named action in the unit's charm.
	Parameters map[string]interface{} `bson:"parameters"`

	// Enqueued is the time the action was added.
	Enqueued time.Time `bson:"enqueued"`

	// Started reflects the time the action began running.
	Started time.Time `bson:"started"`

	// Completed reflects the time that the action was finished.
	Completed time.Time `bson:"completed"`

	// Status represents the end state of the Action; ActionFailed for an
	// action that was removed prematurely, or that failed, and
	// ActionCompleted for an action that successfully completed.
	Status ActionStatus `bson:"status"`

	// Message captures any error returned by the action.
	Message string `bson:"message"`

	// Results are the structured results from the action.
	Results map[string]interface{} `bson:"results"`
}
// Action represents an instruction to do some "action" and is expected
// to match an action definition in a charm. It wraps the persisted
// actionDoc together with the State used to load and update it.
type Action struct {
	st  *State
	doc actionDoc
}
// Id returns the local id of the Action (the document id stripped of
// its environment-UUID prefix).
func (a *Action) Id() string {
	return a.st.localID(a.doc.DocId)
}
// Receiver returns the Name of the ActionReceiver for which this action
// is enqueued. Usually this is a Unit Name().
func (a *Action) Receiver() string {
	return a.doc.Receiver
}
// Name returns the name of the action, as defined in the charm.
func (a *Action) Name() string {
	return a.doc.Name
}
// Parameters will contain a structure representing arguments or parameters to
// an action, and is expected to be validated by the Unit using the Charm
// definition of the Action.
func (a *Action) Parameters() map[string]interface{} {
	return a.doc.Parameters
}
// Enqueued returns the time the action was added to state as a pending
// Action.
func (a *Action) Enqueued() time.Time {
	return a.doc.Enqueued
}
// Started returns the time that the Action execution began (the zero
// time if it has not yet begun).
func (a *Action) Started() time.Time {
	return a.doc.Started
}
// Completed returns the completion time of the Action (the zero time if
// it has not yet finished).
func (a *Action) Completed() time.Time {
	return a.doc.Completed
}
// Status returns the final state of the action.
func (a *Action) Status() ActionStatus {
	return a.doc.Status
}
// Results returns the structured output of the action and any error
// message captured on completion.
func (a *Action) Results() (map[string]interface{}, string) {
	return a.doc.Results, a.doc.Message
}
// ValidateTag should be called before calls to Tag() or ActionTag(). It verifies
// that the Action can produce a valid Tag.
func (a *Action) ValidateTag() bool {
	return names.IsValidAction(a.Id())
}
// Tag implements the Entity interface and returns a names.Tag that
// is a names.ActionTag.
func (a *Action) Tag() names.Tag {
	return a.ActionTag()
}
// ActionTag returns an ActionTag constructed from this action's id.
// It panics if the id is not valid; call ValidateTag first when in
// doubt.
func (a *Action) ActionTag() names.ActionTag {
	return names.NewActionTag(a.Id())
}
// ActionResults is a data transfer object that holds the key Action
// output and results information, used to finalise an Action via
// Finish.
type ActionResults struct {
	Status  ActionStatus           `json:"status"`
	Results map[string]interface{} `json:"results"`
	Message string                 `json:"message"`
}
// Begin marks an action as running, and logs the time it was started.
// It asserts that the action is currently pending; if another client
// has already transitioned it, the transaction fails.
func (a *Action) Begin() (*Action, error) {
	startOp := txn.Op{
		C:      actionsC,
		Id:     a.doc.DocId,
		Assert: bson.D{{"status", ActionPending}},
		Update: bson.D{{"$set", bson.D{
			{"status", ActionRunning},
			{"started", nowToTheSecond()},
		}}},
	}
	if err := a.st.runTransaction([]txn.Op{startOp}); err != nil {
		return nil, err
	}
	// Re-read the document so the returned Action reflects the update.
	return a.st.Action(a.Id())
}
// Finish removes action from the pending queue and captures the output
// and end state of the action.
func (a *Action) Finish(results ActionResults) (*Action, error) {
	return a.removeAndLog(results.Status, results.Results, results.Message)
}
// removeAndLog takes the action off of the pending queue, and creates
// an actionresult to capture the outcome of the action. It asserts that
// the action is not already completed.
func (a *Action) removeAndLog(finalStatus ActionStatus, results map[string]interface{}, message string) (*Action, error) {
	// The update may only apply to an action that has not already
	// reached a terminal status.
	notFinished := bson.D{{"status", bson.D{
		{"$nin", []interface{}{
			ActionCompleted,
			ActionCancelled,
			ActionFailed,
		}}}}}
	finishOp := txn.Op{
		C:      actionsC,
		Id:     a.doc.DocId,
		Assert: notFinished,
		Update: bson.D{{"$set", bson.D{
			{"status", finalStatus},
			{"message", message},
			{"results", results},
			{"completed", nowToTheSecond()},
		}}},
	}
	// Drop the pending-notification doc so watchers stop reporting it.
	removeNotificationOp := txn.Op{
		C:      actionNotificationsC,
		Id:     a.st.docID(ensureActionMarker(a.Receiver()) + a.Id()),
		Remove: true,
	}
	if err := a.st.runTransaction([]txn.Op{finishOp, removeNotificationOp}); err != nil {
		return nil, err
	}
	return a.st.Action(a.Id())
}
// newActionTagFromNotification converts an actionNotificationDoc into
// an names.ActionTag, using the notification's embedded action UUID.
func newActionTagFromNotification(doc actionNotificationDoc) names.ActionTag {
	actionLogger.Debugf("newActionTagFromNotification doc: '%#v'", doc)
	return names.NewActionTag(doc.ActionID)
}
// newAction builds an Action for the given State and actionDoc.
func newAction(st *State, adoc actionDoc) *Action {
	return &Action{
		st:  st,
		doc: adoc,
	}
}
// newActionDoc builds the actionDoc with the given name and parameters,
// along with the matching actionNotificationDoc. The pair share a fresh
// UUID; the notification's _id is prefixed with the receiver name plus
// actionMarker so idPrefixWatchers can match it.
func newActionDoc(st *State, receiverTag names.Tag, actionName string, parameters map[string]interface{}) (actionDoc, actionNotificationDoc, error) {
	prefix := ensureActionMarker(receiverTag.Id())
	actionId, err := NewUUID()
	if err != nil {
		return actionDoc{}, actionNotificationDoc{}, err
	}
	actionLogger.Debugf("newActionDoc name: '%s', receiver: '%s', actionId: '%s'", actionName, receiverTag, actionId)
	envuuid := st.EnvironUUID()
	return actionDoc{
			DocId:      st.docID(actionId.String()),
			EnvUUID:    envuuid,
			Receiver:   receiverTag.Id(),
			Name:       actionName,
			Parameters: parameters,
			Enqueued:   nowToTheSecond(),
			Status:     ActionPending,
		}, actionNotificationDoc{
			DocId:    st.docID(prefix + actionId.String()),
			EnvUUID:  envuuid,
			Receiver: receiverTag.Id(),
			ActionID: actionId.String(),
		}, nil
}
// ensureActionMarker appends actionMarker to a receiver name unless the
// suffix is already present.
var ensureActionMarker = ensureSuffixFn(actionMarker)
// Action returns an Action by Id, which is a UUID. It returns a
// NotFound error when no such document exists.
func (st *State) Action(id string) (*Action, error) {
	actionLogger.Tracef("Action() %q", id)
	actions, closer := st.getCollection(actionsC)
	defer closer()

	doc := actionDoc{}
	err := actions.FindId(id).One(&doc)
	if err == mgo.ErrNotFound {
		return nil, errors.NotFoundf("action %q", id)
	}
	if err != nil {
		return nil, errors.Annotatef(err, "cannot get action %q", id)
	}
	actionLogger.Tracef("Action() %q found %+v", id, doc)
	return newAction(st, doc), nil
}
// ActionByTag returns an Action given an ActionTag.
func (st *State) ActionByTag(tag names.ActionTag) (*Action, error) {
	return st.Action(tag.Id())
}
// FindActionTagsByPrefix finds Actions with ids that share the supplied prefix, and
// returns a list of corresponding ActionTags. Documents whose local id
// does not form a valid action tag are skipped silently.
func (st *State) FindActionTagsByPrefix(prefix string) []names.ActionTag {
	actionLogger.Tracef("FindActionTagsByPrefix() %q", prefix)
	var results []names.ActionTag
	var doc struct {
		Id string `bson:"_id"`
	}
	actions, closer := st.getCollection(actionsC)
	defer closer()
	// Anchor the regex at the start so the index on _id can be used.
	iter := actions.Find(bson.D{{"_id", bson.D{{"$regex", "^" + st.docID(prefix)}}}}).Iter()
	for iter.Next(&doc) {
		actionLogger.Tracef("FindActionTagsByPrefix() iter doc %+v", doc)
		localID := st.localID(doc.Id)
		if names.IsValidAction(localID) {
			results = append(results, names.NewActionTag(localID))
		}
	}
	actionLogger.Tracef("FindActionTagsByPrefix() %q found %+v", prefix, results)
	return results
}
// EnqueueAction
func (st *State) EnqueueAction(receiver names.Tag, actionName string, payload map[string]interface{}) (*Action, error) {<|fim▁hole|> if len(actionName) == 0 {
return nil, errors.New("action name required")
}
receiverCollectionName, receiverId, err := st.tagToCollectionAndId(receiver)
if err != nil {
return nil, errors.Trace(err)
}
doc, ndoc, err := newActionDoc(st, receiver, actionName, payload)
if err != nil {
return nil, errors.Trace(err)
}
ops := []txn.Op{{
C: receiverCollectionName,
Id: receiverId,
Assert: notDeadDoc,
}, {
C: actionsC,
Id: doc.DocId,
Assert: txn.DocMissing,
Insert: doc,
}, {
C: actionNotificationsC,
Id: ndoc.DocId,
Assert: txn.DocMissing,
Insert: ndoc,
}}
buildTxn := func(attempt int) ([]txn.Op, error) {
if notDead, err := isNotDead(st, receiverCollectionName, receiverId); err != nil {
return nil, err
} else if !notDead {
return nil, ErrDead
} else if attempt != 0 {
return nil, errors.Errorf("unexpected attempt number '%d'", attempt)
}
return ops, nil
}
if err = st.run(buildTxn); err == nil {
return newAction(st, doc), nil
}
return nil, err
}
// matchingActions finds actions that match ActionReceiver.
func (st *State) matchingActions(ar ActionReceiver) ([]*Action, error) {
	return st.matchingActionsByReceiverId(ar.Tag().Id())
}
// matchingActionsByReceiverId finds actions that match ActionReceiver name.
func (st *State) matchingActionsByReceiverId(id string) ([]*Action, error) {
	var doc actionDoc
	var actions []*Action

	actionsCollection, closer := st.getCollection(actionsC)
	defer closer()

	iter := actionsCollection.Find(bson.D{{"receiver", id}}).Iter()
	for iter.Next(&doc) {
		actions = append(actions, newAction(st, doc))
	}
	return actions, errors.Trace(iter.Close())
}
// matchingActionNotifications finds actionNotifications that match ActionReceiver.
func (st *State) matchingActionNotifications(ar ActionReceiver) ([]names.ActionTag, error) {
	return st.matchingActionNotificationsByReceiverId(ar.Tag().Id())
}
// matchingActionNotificationsByReceiverId finds actionNotifications for
// the given receiver name and converts them to ActionTags.
func (st *State) matchingActionNotificationsByReceiverId(id string) ([]names.ActionTag, error) {
	var doc actionNotificationDoc
	var tags []names.ActionTag

	notificationCollection, closer := st.getCollection(actionNotificationsC)
	defer closer()

	iter := notificationCollection.Find(bson.D{{"receiver", id}}).Iter()
	for iter.Next(&doc) {
		tags = append(tags, newActionTagFromNotification(doc))
	}
	return tags, errors.Trace(iter.Close())
}
// matchingActionsPending finds actions that match ActionReceiver and
// that are pending.
func (st *State) matchingActionsPending(ar ActionReceiver) ([]*Action, error) {
	// Renamed from "completed": this condition selects pending actions.
	pending := bson.D{{"status", ActionPending}}
	return st.matchingActionsByReceiverAndStatus(ar.Tag(), pending)
}
// matchingActionsRunning finds actions that match ActionReceiver and
// that are running.
func (st *State) matchingActionsRunning(ar ActionReceiver) ([]*Action, error) {
	// Renamed from "completed": this condition selects running actions.
	running := bson.D{{"status", ActionRunning}}
	return st.matchingActionsByReceiverAndStatus(ar.Tag(), running)
}
// matchingActionsCompleted finds actions that match ActionReceiver and
// that are complete: completed, cancelled, or failed.
func (st *State) matchingActionsCompleted(ar ActionReceiver) ([]*Action, error) {
	completed := bson.D{{"$or", []bson.D{
		{{"status", ActionCompleted}},
		{{"status", ActionCancelled}},
		{{"status", ActionFailed}},
	}}}
	return st.matchingActionsByReceiverAndStatus(ar.Tag(), completed)
}
// matchingActionsByReceiverAndStatus finds actions for the given
// receiver tag that also satisfy the supplied status condition.
func (st *State) matchingActionsByReceiverAndStatus(tag names.Tag, statusCondition bson.D) ([]*Action, error) {
	var doc actionDoc
	var actions []*Action

	actionsCollection, closer := st.getCollection(actionsC)
	defer closer()

	sel := append(bson.D{{"receiver", tag.Id()}}, statusCondition...)
	iter := actionsCollection.Find(sel).Iter()

	for iter.Next(&doc) {
		actions = append(actions, newAction(st, doc))
	}
	return actions, errors.Trace(iter.Close())
}
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
from . import website
|
<|file_name|>flac.js<|end_file_name|><|fim▁begin|>(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
exports.FLACDemuxer = require('./src/demuxer');
exports.FLACDecoder = require('./src/decoder');
require('./src/ogg');
},{"./src/decoder":2,"./src/demuxer":3,"./src/ogg":4}],2:[function(require,module,exports){
/*
* FLAC.js - Free Lossless Audio Codec decoder in JavaScript
* Original C version from FFmpeg (c) 2003 Alex Beregszaszi
* JavaScript port by Devon Govett and Jens Nockert of Official.fm Labs
*
* Licensed under the same terms as the original. The original
* license follows.
*
* FLAC.js is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FLAC.js is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
var AV = (window.AV);
var FLACDecoder = AV.Decoder.extend(function() {
AV.Decoder.register('flac', this);
this.prototype.setCookie = function(cookie) {
this.cookie = cookie;
// initialize arrays
this.decoded = [];
for (var i = 0; i < this.format.channelsPerFrame; i++) {
this.decoded[i] = new Int32Array(cookie.maxBlockSize);
}
};
const BLOCK_SIZES = new Int16Array([
0, 192, 576 << 0, 576 << 1, 576 << 2, 576 << 3, 0, 0,
256 << 0, 256 << 1, 256 << 2, 256 << 3, 256 << 4, 256 << 5, 256 << 6, 256 << 7
]);
const SAMPLE_RATES = new Int32Array([
0, 88200, 176400, 192000,
8000, 16000, 22050, 24000, 32000, 44100, 48000, 96000,
0, 0, 0, 0
]);
const SAMPLE_SIZES = new Int8Array([
0, 8, 12, 0, 16, 20, 24, 0
]);
const MAX_CHANNELS = 8,
CHMODE_INDEPENDENT = 0,
CHMODE_LEFT_SIDE = 8,
CHMODE_RIGHT_SIDE = 9,
CHMODE_MID_SIDE = 10;
this.prototype.readChunk = function() {
var stream = this.bitstream;
if (!stream.available(32))
return;
// frame sync code
if ((stream.read(15) & 0x7FFF) !== 0x7FFC)
throw new Error('Invalid sync code');
var isVarSize = stream.read(1), // variable block size stream code
bsCode = stream.read(4), // block size
srCode = stream.read(4), // sample rate code
chMode = stream.read(4), // channel mode
bpsCode = stream.read(3); // bits per sample
stream.advance(1); // reserved bit
// channels
this.chMode = chMode;
var channels;
if (chMode < MAX_CHANNELS) {
channels = chMode + 1;
this.chMode = CHMODE_INDEPENDENT;
} else if (chMode <= CHMODE_MID_SIDE) {
channels = 2;
} else {
throw new Error('Invalid channel mode');
}
if (channels !== this.format.channelsPerFrame)
throw new Error('Switching channel layout mid-stream not supported.');
// bits per sample
if (bpsCode === 3 || bpsCode === 7)
throw new Error('Invalid sample size code');
this.bps = SAMPLE_SIZES[bpsCode];
if (this.bps !== this.format.bitsPerChannel)
throw new Error('Switching bits per sample mid-stream not supported.');
var sampleShift, is32;
if (this.bps > 16) {
sampleShift = 32 - this.bps;
is32 = true;
} else {
sampleShift = 16 - this.bps;
is32 = false;
}
// sample number or frame number
// see http://www.hydrogenaudio.org/forums/index.php?s=ea7085ffe6d57132c36e6105c0d434c9&showtopic=88390&pid=754269&st=0&#entry754269
var ones = 0;
while (stream.read(1) === 1)
ones++;
var frame_or_sample_num = stream.read(7 - ones);
for (; ones > 1; ones--) {
stream.advance(2); // == 2
frame_or_sample_num = (frame_or_sample_num << 6) | stream.read(6);
}
// block size
if (bsCode === 0)
throw new Error('Reserved blocksize code');
else if (bsCode === 6)
this.blockSize = stream.read(8) + 1;
else if (bsCode === 7)
this.blockSize = stream.read(16) + 1;
else
this.blockSize = BLOCK_SIZES[bsCode];
// sample rate
var sampleRate;
if (srCode < 12)
sampleRate = SAMPLE_RATES[srCode];
else if (srCode === 12)
sampleRate = stream.read(8) * 1000;
else if (srCode === 13)
sampleRate = stream.read(16);
else if (srCode === 14)
sampleRate = stream.read(16) * 10;
else
throw new Error('Invalid sample rate code');
stream.advance(8); // skip CRC check
// subframes
for (var i = 0; i < channels; i++)
this.decodeSubframe(i);
stream.align();
stream.advance(16); // skip CRC frame footer
var output = new ArrayBuffer(this.blockSize * channels * this.bps / 8),
buf = is32 ? new Int32Array(output) : new Int16Array(output),
blockSize = this.blockSize,
decoded = this.decoded,
j = 0;
switch (this.chMode) {
case CHMODE_INDEPENDENT:
for (var k = 0; k < blockSize; k++) {
for (var i = 0; i < channels; i++) {
buf[j++] = decoded[i][k] << sampleShift;
}
}
break;
case CHMODE_LEFT_SIDE:
for (var i = 0; i < blockSize; i++) {
var left = decoded[0][i],
right = decoded[1][i];
buf[j++] = left << sampleShift;
buf[j++] = (left - right) << sampleShift;
}
break;
case CHMODE_RIGHT_SIDE:
for (var i = 0; i < blockSize; i++) {
var left = decoded[0][i],
right = decoded[1][i];
buf[j++] = (left + right) << sampleShift;
buf[j++] = right << sampleShift;
}
break;
case CHMODE_MID_SIDE:
for (var i = 0; i < blockSize; i++) {
var left = decoded[0][i],
right = decoded[1][i];
left -= right >> 1;
buf[j++] = (left + right) << sampleShift;
buf[j++] = left << sampleShift;
}
break;
}
return buf;
};
this.prototype.decodeSubframe = function(channel) {
var wasted = 0,
stream = this.bitstream,
blockSize = this.blockSize,
decoded = this.decoded;
this.curr_bps = this.bps;
if (channel === 0) {
if (this.chMode === CHMODE_RIGHT_SIDE)
this.curr_bps++;
} else {
if (this.chMode === CHMODE_LEFT_SIDE || this.chMode === CHMODE_MID_SIDE)
this.curr_bps++;
}
if (stream.read(1))
throw new Error("Invalid subframe padding");
var type = stream.read(6);
if (stream.read(1)) {
wasted = 1;
while (!stream.read(1))
wasted++;
this.curr_bps -= wasted;
}
if (this.curr_bps > 32)
throw new Error("decorrelated bit depth > 32 (" + this.curr_bps + ")");
if (type === 0) {
var tmp = stream.read(this.curr_bps, true);
for (var i = 0; i < blockSize; i++)
decoded[channel][i] = tmp;
} else if (type === 1) {
var bps = this.curr_bps;
for (var i = 0; i < blockSize; i++)
decoded[channel][i] = stream.read(bps, true);
} else if ((type >= 8) && (type <= 12)) {
this.decode_subframe_fixed(channel, type & ~0x8);
} else if (type >= 32) {
this.decode_subframe_lpc(channel, (type & ~0x20) + 1);
} else {
throw new Error("Invalid coding type");
}
if (wasted) {
for (var i = 0; i < blockSize; i++)
decoded[channel][i] <<= wasted;
}
};
this.prototype.decode_subframe_fixed = function(channel, predictor_order) {
var decoded = this.decoded[channel],
stream = this.bitstream,
bps = this.curr_bps;
// warm up samples
for (var i = 0; i < predictor_order; i++)
decoded[i] = stream.read(bps, true);
this.decode_residuals(channel, predictor_order);
var a = 0, b = 0, c = 0, d = 0;
if (predictor_order > 0)
a = decoded[predictor_order - 1];
if (predictor_order > 1)
b = a - decoded[predictor_order - 2];
if (predictor_order > 2)
c = b - decoded[predictor_order - 2] + decoded[predictor_order - 3];
if (predictor_order > 3)
d = c - decoded[predictor_order - 2] + 2 * decoded[predictor_order - 3] - decoded[predictor_order - 4];
switch (predictor_order) {
case 0:
break;
case 1:
case 2:
case 3:
case 4:
var abcd = new Int32Array([a, b, c, d]),
blockSize = this.blockSize;
for (var i = predictor_order; i < blockSize; i++) {
abcd[predictor_order - 1] += decoded[i];
for (var j = predictor_order - 2; j >= 0; j--) {
abcd[j] += abcd[j + 1];
}
decoded[i] = abcd[0];
}
break;
default:
throw new Error("Invalid Predictor Order " + predictor_order);
}
};
this.prototype.decode_subframe_lpc = function(channel, predictor_order) {
var stream = this.bitstream,
decoded = this.decoded[channel],
bps = this.curr_bps,
blockSize = this.blockSize;
// warm up samples
for (var i = 0; i < predictor_order; i++) {
decoded[i] = stream.read(bps, true);
}
var coeff_prec = stream.read(4) + 1;
if (coeff_prec === 16)
throw new Error("Invalid coefficient precision");
var qlevel = stream.read(5, true);
if (qlevel < 0)
throw new Error("Negative qlevel, maybe buggy stream");
var coeffs = new Int32Array(32);
for (var i = 0; i < predictor_order; i++) {
coeffs[i] = stream.read(coeff_prec, true);
}
this.decode_residuals(channel, predictor_order);
if (this.bps > 16)
throw new Error("no 64-bit integers in JS, could probably use doubles though");
for (var i = predictor_order; i < blockSize - 1; i += 2) {
var d = decoded[i - predictor_order],
s0 = 0, s1 = 0, c;
for (var j = predictor_order - 1; j > 0; j--) {
c = coeffs[j];
s0 += c * d;
d = decoded[i - j];
s1 += c * d;
}
c = coeffs[0];
s0 += c * d;
d = decoded[i] += (s0 >> qlevel);
s1 += c * d;
decoded[i + 1] += (s1 >> qlevel);
}
if (i < blockSize) {
var sum = 0;
for (var j = 0; j < predictor_order; j++)
sum += coeffs[j] * decoded[i - j - 1];
decoded[i] += (sum >> qlevel);
}
};
const INT_MAX = 32767;
this.prototype.decode_residuals = function(channel, predictor_order) {
var stream = this.bitstream,
method_type = stream.read(2);
if (method_type > 1)
throw new Error('Illegal residual coding method ' + method_type);
var rice_order = stream.read(4),
samples = (this.blockSize >>> rice_order);
if (predictor_order > samples)
throw new Error('Invalid predictor order ' + predictor_order + ' > ' + samples);
var decoded = this.decoded[channel],
sample = predictor_order,
i = predictor_order;
for (var partition = 0; partition < (1 << rice_order); partition++) {
var tmp = stream.read(method_type === 0 ? 4 : 5);
if (tmp === (method_type === 0 ? 15 : 31)) {
tmp = stream.read(5);
for (; i < samples; i++)
decoded[sample++] = stream.read(tmp, true);
} else {
for (; i < samples; i++)
decoded[sample++] = this.golomb(tmp, INT_MAX, 0);
}
i = 0;
}
};
const MIN_CACHE_BITS = 25;
this.prototype.golomb = function(k, limit, esc_len) {
var data = this.bitstream,
offset = data.bitPosition,
buf = data.peek(32 - offset) << offset,
v = 0;
var log = 31 - clz(buf | 1); // log2(buf)
if (log - k >= 32 - MIN_CACHE_BITS && 32 - log < limit) {
buf >>>= log - k;
buf += (30 - log) << k;
data.advance(32 + k - log);
v = buf;
} else {
for (var i = 0; data.read(1) === 0; i++)
buf = data.peek(32 - offset) << offset;
if (i < limit - 1) {
if (k)
buf = data.read(k);
else
buf = 0;
v = buf + (i << k);
} else if (i === limit - 1) {
buf = data.read(esc_len);
v = buf + 1;
} else {
v = -1;
}
}
return (v >> 1) ^ -(v & 1);
};
// Should be in the damned standard library...
function clz(input) {
var output = 0,
curbyte = 0;
while(true) { // emulate goto in JS using the break statement :D
curbyte = input >>> 24;
if (curbyte) break;
output += 8;
curbyte = input >>> 16;
if (curbyte & 0xff) break;
output += 8;
curbyte = input >>> 8;
if (curbyte & 0xff) break;
output += 8;
curbyte = input;
if (curbyte & 0xff) break;
output += 8;
return output;
}
if (!(curbyte & 0xf0))
output += 4;
else
curbyte >>>= 4;
if (curbyte & 0x8)
return output;
if (curbyte & 0x4)
return output + 1;
if (curbyte & 0x2)
return output + 2;
if (curbyte & 0x1)
return output + 3;
// shouldn't get here
return output + 4;
}
});
module.exports = FLACDecoder;
},{}],3:[function(require,module,exports){
/*
* FLAC.js - Free Lossless Audio Codec decoder in JavaScript
* By Devon Govett and Jens Nockert of Official.fm Labs
*
* FLAC.js is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FLAC.js is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
*/
var AV = (window.AV);
var FLACDemuxer = AV.Demuxer.extend(function() {
AV.Demuxer.register(this);
this.probe = function(buffer) {
return buffer.peekString(0, 4) === 'fLaC';
}
const STREAMINFO = 0,
PADDING = 1,
APPLICATION = 2,
SEEKTABLE = 3,
VORBIS_COMMENT = 4,
CUESHEET = 5,
PICTURE = 6,
INVALID = 127,
STREAMINFO_SIZE = 34;
this.prototype.readChunk = function() {
var stream = this.stream;
if (!this.readHeader && stream.available(4)) {
if (stream.readString(4) !== 'fLaC')
return this.emit('error', 'Invalid FLAC file.');
this.readHeader = true;
}
while (stream.available(1) && !this.last) {
if (!this.readBlockHeaders) {
var tmp = stream.readUInt8();
this.last = (tmp & 0x80) === 0x80,
this.type = tmp & 0x7F,
this.size = stream.readUInt24();
}
if (!this.foundStreamInfo && this.type !== STREAMINFO)
return this.emit('error', 'STREAMINFO must be the first block');
if (!stream.available(this.size))
return;
switch (this.type) {
case STREAMINFO:
if (this.foundStreamInfo)
return this.emit('error', 'STREAMINFO can only occur once.');
if (this.size !== STREAMINFO_SIZE)
return this.emit('error', 'STREAMINFO size is wrong.');
this.foundStreamInfo = true;
var bitstream = new AV.Bitstream(stream);
var cookie = {
minBlockSize: bitstream.read(16),
maxBlockSize: bitstream.read(16),
minFrameSize: bitstream.read(24),
maxFrameSize: bitstream.read(24)
};
this.format = {
formatID: 'flac',
sampleRate: bitstream.read(20),
channelsPerFrame: bitstream.read(3) + 1,
bitsPerChannel: bitstream.read(5) + 1
};
this.emit('format', this.format);
this.emit('cookie', cookie);
var sampleCount = bitstream.read(36);
this.emit('duration', sampleCount / this.format.sampleRate * 1000 | 0);
stream.advance(16); // skip MD5 hashes
this.readBlockHeaders = false;
break;
/*
I am only looking at the least significant 32 bits of sample number and offset data
This is more than sufficient for the longest flac file I have (~50 mins 2-channel 16-bit 44.1k which uses about 7.5% of the UInt32 space for the largest offset)
Can certainly be improved by storing sample numbers and offests as doubles, but would require additional overriding of the searchTimestamp and seek functions (possibly more?)
Also the flac faq suggests it would be possible to find frame lengths and thus create seek points on the fly via decoding but I assume this would be slow
I may look into these thigns though as my project progresses
*/
case SEEKTABLE:
for(var s=0; s<this.size/18; s++)
{
if(stream.peekUInt32(0) == 0xFFFFFFFF && stream.peekUInt32(1) == 0xFFFFFFFF)
{
//placeholder, ignore
stream.advance(18);
} else {
if(stream.readUInt32() > 0)
{
this.emit('error', 'Seek points with sample number >UInt32 not supported');
}
var samplenum = stream.readUInt32();
if(stream.readUInt32() > 0)
{
this.emit('error', 'Seek points with stream offset >UInt32 not supported');
}
var offset = stream.readUInt32();
stream.advance(2);
this.addSeekPoint(offset, samplenum);
}
}
break;
case VORBIS_COMMENT:
// see http://www.xiph.org/vorbis/doc/v-comment.html
this.metadata || (this.metadata = {});
var len = stream.readUInt32(true);
this.metadata.vendor = stream.readString(len);
var length = stream.readUInt32(true);
for (var i = 0; i < length; i++) {
len = stream.readUInt32(true);
var str = stream.readString(len, 'utf8'),
idx = str.indexOf('=');
this.metadata[str.slice(0, idx).toLowerCase()] = str.slice(idx + 1);
}
// TODO: standardize field names across formats
break;
case PICTURE:
var type = stream.readUInt32();
if (type !== 3) { // make sure this is album art (type 3)
stream.advance(this.size - 4);
} else {
var mimeLen = stream.readUInt32(),
mime = stream.readString(mimeLen),
descLen = stream.readUInt32(),
description = stream.readString(descLen),
width = stream.readUInt32(),
height = stream.readUInt32(),
depth = stream.readUInt32(),
colors = stream.readUInt32(),
length = stream.readUInt32(),
picture = stream.readBuffer(length);
this.metadata || (this.metadata = {});
this.metadata.coverArt = picture;
}
// does anyone want the rest of the info?<|fim▁hole|> default:
stream.advance(this.size);
this.readBlockHeaders = false;
}
if (this.last && this.metadata)
this.emit('metadata', this.metadata);
}
while (stream.available(1) && this.last) {
var buffer = stream.readSingleBuffer(stream.remainingBytes());
this.emit('data', buffer);
}
}
});
module.exports = FLACDemuxer;
},{}],4:[function(require,module,exports){
// if ogg.js exists, register a plugin so FLAC-in-Ogg streams can be
// demuxed; silently skip registration when OggDemuxer is absent.
try {
    var OggDemuxer = (window.AV.OggDemuxer);
} catch (e) {};

if (!OggDemuxer) return;

OggDemuxer.plugins.push({
    // Ogg-encapsulated FLAC starts with 0x7F followed by 'FLAC'.
    magic: "\177FLAC",

    init: function() {
        // Accumulate packets into a stream the native demuxer can read.
        this.list = new AV.BufferList();
        this.stream = new AV.Stream(this.list);
    },

    readHeaders: function(packet) {
        var stream = this.stream;
        this.list.append(new AV.Buffer(packet));

        stream.advance(5); // magic
        if (stream.readUInt8() != 1)
            throw new Error('Unsupported FLAC version');

        stream.advance(3);
        if (stream.peekString(0, 4) != 'fLaC')
            throw new Error('Not flac');

        // Delegate the remaining bytes to the regular FLAC demuxer.
        this.flac = AV.Demuxer.find(stream.peekSingleBuffer(0, stream.remainingBytes()));
        if (!this.flac)
            throw new Error('Flac demuxer not found');

        this.flac.prototype.readChunk.call(this);
        return true;
    },

    readPacket: function(packet) {
        this.list.append(new AV.Buffer(packet));
        this.flac.prototype.readChunk.call(this);
    }
});
},{}]},{},[1])
//# sourceMappingURL=flac.js.map<|fim▁end|>
|
break;
|
<|file_name|>rlseries_scraper.py<|end_file_name|><|fim▁begin|>"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import kodi
import log_utils # @UnusedImport
import dom_parser
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import QUALITIES
from salts_lib.constants import VIDEO_TYPES
import scraper
BASE_URL = 'http://rlseries.com'
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):<|fim▁hole|> @classmethod
def get_name(cls):
return 'RLSeries'
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
page_url = urlparse.urljoin(self.base_url, source_url)
html = self._http_get(page_url, cache_limit=1)
fragment = dom_parser.parse_dom(html, 'div', {'class': 'v_ifo'})
if fragment:
for stream_url in dom_parser.parse_dom(fragment[0], 'a', ret='href'):
host = urlparse.urlparse(stream_url).hostname
quality = scraper_utils.get_quality(video, host, QUALITIES.HIGH)
hoster = {'multi-part': False, 'host': host, 'class': self, 'quality': quality, 'views': None, 'rating': None, 'url': stream_url, 'direct': False}
hosters.append(hoster)
return hosters
def _get_episode_url(self, season_url, video):
episode_pattern = 'href="([^"]*episode-%s-[^"]*)' % (video.episode)
title_pattern = '<a[^>]*href="(?P<url>[^"]+)[^>]+title="Episode\s+\d+:\s*(?P<title>[^"]+)'
airdate_pattern = 'class="lst"[^>]+href="([^"]+)(?:[^>]+>){6}{p_day}/{p_month}/{year}<'
return self._default_get_episode_url(season_url, video, episode_pattern, title_pattern, airdate_pattern)
def search(self, video_type, title, year, season=''): # @UnusedVariable
results = []
if title and title[0].isalpha():
page_url = ['/list/?char=%s' % (title[0])]
while page_url:
page_url = urlparse.urljoin(self.base_url, page_url[0])
html = self._http_get(page_url, cache_limit=48)
fragment = dom_parser.parse_dom(html, 'ul', {'class': 'list-film-char'})
if fragment:
norm_title = scraper_utils.normalize_title(title)
for match in re.finditer('href="([^"]+)[^>]+>(.*?)</a>', fragment[0]):
match_url, match_title = match.groups()
match_title = re.sub('</?strong>', '', match_title)
match = re.search('Season\s+(\d+)', match_title, re.I)
if match:
if season and int(season) != int(match.group(1)):
continue
if norm_title in scraper_utils.normalize_title(match_title):
result = {'title': scraper_utils.cleanse_title(match_title), 'year': '', 'url': scraper_utils.pathify_url(match_url)}
results.append(result)
if results:
break
page_url = dom_parser.parse_dom(html, 'a', {'class': 'nextpostslink'}, ret='href')
return results<|fim▁end|>
|
return frozenset([VIDEO_TYPES.SEASON, VIDEO_TYPES.EPISODE])
|
<|file_name|>unboxed-closures-extern-fn.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>
// Checks that extern fn pointers implement the full range of Fn traits.
// pretty-expanded FIXME #23616
#![feature(unboxed_closures)]
#![feature(unboxed_closures)]
use std::ops::{Fn,FnMut,FnOnce};
/// Ordinary fn item used as the callee below; the test checks that it can
/// be invoked through each of the `Fn`/`FnMut`/`FnOnce` generic bounds.
fn square(x: isize) -> isize { x * x }
/// Invoke `f` through the `Fn` trait (shared borrow of the callable).
fn call_it<F>(f: &F, x: isize) -> isize
    where F: Fn(isize) -> isize
{
    (*f)(x)
}
/// Invoke `f` through the `FnMut` trait (mutable borrow of the callable).
fn call_it_mut<F>(f: &mut F, x: isize) -> isize
    where F: FnMut(isize) -> isize
{
    (*f)(x)
}
/// Invoke `f` through the `FnOnce` trait (the callable is consumed).
fn call_it_once<F>(f: F, x: isize) -> isize
    where F: FnOnce(isize) -> isize
{
    let result = f(x);
    result
}
// Call `square` through each of the three closure traits and check that
// every indirect path agrees with a direct call.
fn main() {
    let x = call_it(&square, 22);
    let y = call_it_mut(&mut square, 22);
    let z = call_it_once(square, 22);
    assert_eq!(x, square(22));
    assert_eq!(y, square(22));
    assert_eq!(z, square(22));
}
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
<|file_name|>test_auth_crypt.py<|end_file_name|><|fim▁begin|>import json
import pytest
from indy import crypto, did, error
@pytest.mark.asyncio
async def test_auth_crypt_works_for_created_key(wallet_handle, seed_my1, verkey_my2, message):
verkey = await did.create_key(wallet_handle, json.dumps({'seed': seed_my1}))<|fim▁hole|> await crypto.auth_crypt(wallet_handle, verkey, verkey_my2, message)
@pytest.mark.asyncio
async def test_auth_crypt_works_for_unknown_sender_verkey(wallet_handle, verkey_my1, verkey_my2, message):
    """auth_crypt must raise WalletItemNotFound when the sender verkey is not stored in the wallet."""
    with pytest.raises(error.WalletItemNotFound):
        await crypto.auth_crypt(wallet_handle, verkey_my1, verkey_my2, message)
@pytest.mark.asyncio
async def test_auth_crypt_works_for_invalid_handle(wallet_handle, verkey_my1, verkey_my2, message):
    """auth_crypt must raise WalletInvalidHandle for a handle that refers to no open wallet."""
    with pytest.raises(error.WalletInvalidHandle):
        # Offsetting a valid handle yields a handle value that is not open.
        invalid_wallet_handle = wallet_handle + 1
        await crypto.auth_crypt(invalid_wallet_handle, verkey_my1, verkey_my2, message)
@pytest.mark.asyncio
async def test_auth_crypt_works_for_invalid_recipient_vk(wallet_handle, identity_trustee1, message):
    """auth_crypt must reject a malformed recipient verkey with CommonInvalidStructure."""
    (_, key) = identity_trustee1
    with pytest.raises(error.CommonInvalidStructure):
        # The underscores make this string invalid as a base58-encoded verkey.
        await crypto.auth_crypt(wallet_handle, key, 'CnEDk___MnmiHXEV1WFgbV___eYnPqs___TdcZaNhFVW', message)
| |
<|file_name|>glue.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//!
//
// Code relating to drop glue.
use back::abi;
use back::link::*;
use llvm;
use llvm::{ValueRef, get_param};
use metadata::csearch;
use middle::lang_items::ExchangeFreeFnLangItem;
use middle::subst;
use middle::subst::{Subst, Substs};
use middle::ty::{self, Ty};
use trans::adt::GetDtorType; // for tcx.dtor_type()
use trans::adt;
use trans::attributes;
use trans::base::*;
use trans::build::*;
use trans::callee;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::debuginfo::DebugLoc;
use trans::declare;
use trans::expr;
use trans::foreign;
use trans::inline;
use trans::machine::*;
use trans::monomorphize;
use trans::type_of::{type_of, type_of_dtor, sizing_type_of, align_of};
use trans::type_::Type;
use arena::TypedArena;
use libc::c_uint;
use syntax::ast;
use syntax::attr::InlineAttr;
/// Emit a call to the `exchange_free` lang item to free the allocation at
/// `v`, with size and alignment given as (possibly dynamic) LLVM values.
pub fn trans_exchange_free_dyn<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                           v: ValueRef,
                                           size: ValueRef,
                                           align: ValueRef,
                                           debug_loc: DebugLoc)
                                           -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_exchange_free");
    let ccx = cx.ccx();
    // The lang item takes an opaque `*i8` pointer plus size and alignment.
    callee::trans_lang_call(cx,
                        langcall(cx, None, "", ExchangeFreeFnLangItem),
                        &[PointerCast(cx, v, Type::i8p(ccx)), size, align],
                        Some(expr::Ignore),
                        debug_loc).bcx
}
/// Convenience wrapper over `trans_exchange_free_dyn` for a statically
/// known size and alignment.
pub fn trans_exchange_free<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                       v: ValueRef,
                                       size: u64,
                                       align: u32,
                                       debug_loc: DebugLoc)
                                       -> Block<'blk, 'tcx> {
    trans_exchange_free_dyn(cx,
                            v,
                            C_uint(cx.ccx(), size),
                            C_uint(cx.ccx(), align),
                            debug_loc)
}
/// Free the (sized) `Box` contents of type `content_ty` at `ptr`,
/// doing nothing for zero-sized contents, which never allocated.
pub fn trans_exchange_free_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          ptr: ValueRef,
                                          content_ty: Ty<'tcx>,
                                          debug_loc: DebugLoc)
                                          -> Block<'blk, 'tcx> {
    assert!(type_is_sized(bcx.ccx().tcx(), content_ty));
    let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
    let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);
    // `Box<ZeroSizeType>` does not allocate.
    if content_size != 0 {
        let content_align = align_of(bcx.ccx(), content_ty);
        trans_exchange_free(bcx, ptr, content_size, content_align, debug_loc)
    } else {
        bcx
    }
}
/// Map `t` to the type whose drop glue is actually used for it. Types that
/// need no drop work are collapsed to `i8` so they can all share a single
/// trivial glue function.
pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                    t: Ty<'tcx>) -> Ty<'tcx> {
    let tcx = ccx.tcx();
    // Even if there is no dtor for t, there might be one deeper down and we
    // might need to pass in the vtable ptr.
    if !type_is_sized(tcx, t) {
        return t
    }
    // FIXME (#22815): note that type_needs_drop conservatively
    // approximates in some cases and may say a type expression
    // requires drop glue when it actually does not.
    //
    // (In this case it is not clear whether any harm is done, i.e.
    // erroneously returning `t` in some cases where we could have
    // returned `tcx.types.i8` does not appear unsound. The impact on
    // code quality is unknown at this time.)
    if !type_needs_drop(tcx, t) {
        return tcx.types.i8;
    }
    match t.sty {
        ty::TyBox(typ) if !type_needs_drop(tcx, typ)
                         && type_is_sized(tcx, typ) => {
            let llty = sizing_type_of(ccx, typ);
            // `Box<ZeroSizeType>` does not allocate.
            if llsize_of_alloc(ccx, llty) == 0 {
                tcx.types.i8
            } else {
                t
            }
        }
        _ => t
    }
}
/// Drop the value of type `t` stored by alias at `v`, running any `Drop`
/// impl; thin wrapper over `drop_ty_core` with `skip_dtor = false`.
pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                           v: ValueRef,
                           t: Ty<'tcx>,
                           debug_loc: DebugLoc) -> Block<'blk, 'tcx> {
    drop_ty_core(bcx, v, t, debug_loc, false)
}
/// Emit a call to the drop glue for `t` on the lvalue `v`. When `skip_dtor`
/// is set, glue is requested that drops only the contents, bypassing `t`'s
/// own `Drop` impl (see `DropGlueKind::TyContents`).
pub fn drop_ty_core<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                v: ValueRef,
                                t: Ty<'tcx>,
                                debug_loc: DebugLoc,
                                skip_dtor: bool) -> Block<'blk, 'tcx> {
    // NB: v is an *alias* of type t here, not a direct value.
    debug!("drop_ty_core(t={:?}, skip_dtor={})", t, skip_dtor);
    let _icx = push_ctxt("drop_ty");
    if bcx.fcx.type_needs_drop(t) {
        let ccx = bcx.ccx();
        let g = if skip_dtor {
            DropGlueKind::TyContents(t)
        } else {
            DropGlueKind::Ty(t)
        };
        let glue = get_drop_glue_core(ccx, g);
        let glue_type = get_drop_glue_type(ccx, t);
        // The glue may have been built for a collapsed type (e.g. i8);
        // cast the pointer so the call types line up.
        let ptr = if glue_type != t {
            PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
        } else {
            v
        };
        Call(bcx, glue, &[ptr], None, debug_loc);
    }
    bcx
}
/// Drop an *immediate* (by-value) `v`: spill it to a fresh stack slot so
/// the by-alias drop glue can be invoked on it.
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     v: ValueRef,
                                     t: Ty<'tcx>,
                                     debug_loc: DebugLoc,
                                     skip_dtor: bool)
                                     -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("drop_ty_immediate");
    let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
    store_ty(bcx, v, vp, t);
    drop_ty_core(bcx, vp, t, debug_loc, skip_dtor)
}
/// Fetch (or create) the normal drop glue for `t`: dtor plus contents.
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
    get_drop_glue_core(ccx, DropGlueKind::Ty(t))
}
/// Which flavor of drop glue is being requested for a type; used as the
/// key in the crate-wide drop-glue caches.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum DropGlueKind<'tcx> {
    /// The normal path; runs the dtor, and then recurs on the contents
    Ty(Ty<'tcx>),
    /// Skips the dtor, if any, for ty; drops the contents directly.
    /// Note that the dtor is only skipped at the most *shallow*
    /// level, namely, an `impl Drop for Ty` itself. So, for example,
    /// if Ty is Newtype(S) then only the Drop impl for for Newtype
    /// itself will be skipped, while the Drop impl for S, if any,
    /// will be invoked.
    TyContents(Ty<'tcx>),
}
impl<'tcx> DropGlueKind<'tcx> {
    /// The underlying type, regardless of flavor.
    fn ty(&self) -> Ty<'tcx> {
        match *self { DropGlueKind::Ty(t) | DropGlueKind::TyContents(t) => t }
    }

    /// Apply `f` to the underlying type while preserving the flavor.
    fn map_ty<F>(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>) -> Ty<'tcx>
    {
        match *self {
            DropGlueKind::Ty(t) => DropGlueKind::Ty(f(t)),
            DropGlueKind::TyContents(t) => DropGlueKind::TyContents(f(t)),
        }
    }
}
/// Look up or generate the drop-glue function for `g`, caching it in
/// `ccx.drop_glues()`. The requested type is first collapsed via
/// `get_drop_glue_type` so equivalent types share one glue function.
fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                g: DropGlueKind<'tcx>) -> ValueRef {
    debug!("make drop glue for {:?}", g);
    let g = g.map_ty(|t| get_drop_glue_type(ccx, t));
    debug!("drop glue type {:?}", g);
    match ccx.drop_glues().borrow().get(&g) {
        Some(&glue) => return glue,
        _ => { }
    }
    let t = g.ty();

    // Unsized values are dropped through a fat pointer, modeled here as a
    // pointer to `Box<t>`; sized values take a thin `*t`.
    let llty = if type_is_sized(ccx.tcx(), t) {
        type_of(ccx, t).ptr_to()
    } else {
        type_of(ccx, ccx.tcx().mk_box(t)).ptr_to()
    };

    let llfnty = Type::glue_fn(ccx, llty);

    // To avoid infinite recursion, don't `make_drop_glue` until after we've
    // added the entry to the `drop_glues` cache.
    if let Some(old_sym) = ccx.available_drop_glues().borrow().get(&g) {
        let llfn = declare::declare_cfn(ccx, &old_sym, llfnty, ccx.tcx().mk_nil());
        ccx.drop_glues().borrow_mut().insert(g, llfn);
        return llfn;
    };

    let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, "drop");
    let llfn = declare::define_cfn(ccx, &fn_nm, llfnty, ccx.tcx().mk_nil()).unwrap_or_else(||{
        ccx.sess().bug(&format!("symbol `{}` already defined", fn_nm));
    });
    ccx.available_drop_glues().borrow_mut().insert(g, fn_nm);

    let _s = StatRecorder::new(ccx, format!("drop {:?}", t));

    let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
    let (arena, fcx): (TypedArena<_>, FunctionContext);
    arena = TypedArena::new();
    fcx = new_fn_ctxt(ccx, llfn, ast::DUMMY_NODE_ID, false,
                      ty::FnConverging(ccx.tcx().mk_nil()),
                      empty_substs, None, &arena);

    let bcx = init_function(&fcx, false, ty::FnConverging(ccx.tcx().mk_nil()));

    update_linkage(ccx, llfn, None, OriginalTranslation);

    // FIXME: Currently LLVM has a bug where if an SSA value is created in one
    //        landing pad and then used in another it will abort during
    //        compilation. The compiler never actually generates nested landing
    //        pads, but this often arises when destructors are inlined into
    //        other functions. To prevent this inlining from happening (and thus
    //        preventing the LLVM abort) we mark all drop glue as inline(never)
    //        on MSVC.
    //
    //        For more information about the bug, see:
    //
    //          https://llvm.org/bugs/show_bug.cgi?id=23884
    //
    //        This is clearly not the ideal solution to the problem (due to the
    //        perf hits), so this should be removed once the upstream bug is
    //        fixed.
    if ccx.sess().target.target.options.is_like_msvc {
        attributes::inline(llfn, InlineAttr::Never);
    }

    ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
    // All glue functions take values passed *by alias*; this is a
    // requirement since in many contexts glue is invoked indirectly and
    // the caller has no idea if it's dealing with something that can be
    // passed by value.
    //
    // llfn is expected be declared to take a parameter of the appropriate
    // type, so we don't need to explicitly cast the function parameter.

    let llrawptr0 = get_param(llfn, fcx.arg_offset() as c_uint);
    let bcx = make_drop_glue(bcx, llrawptr0, g);
    finish_fn(&fcx, bcx, ty::FnConverging(ccx.tcx().mk_nil()), DebugLoc::None);

    llfn
}
fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
struct_data: ValueRef,
dtor_did: ast::DefId,
class_did: ast::DefId,
substs: &subst::Substs<'tcx>)
-> Block<'blk, 'tcx> {
assert!(type_is_sized(bcx.tcx(), t), "Precondition: caller must ensure t is sized");
let repr = adt::represent_type(bcx.ccx(), t);
let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
let loaded = load_ty(bcx, drop_flag.val, bcx.tcx().dtor_type());
let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
let init_val = C_integral(drop_flag_llty, adt::DTOR_NEEDED as u64, false);
let bcx = if !bcx.ccx().check_drop_flag_for_sanity() {
bcx
} else {
let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
let done_val = C_integral(drop_flag_llty, adt::DTOR_DONE as u64, false);
let not_init = ICmp(bcx, llvm::IntNE, loaded, init_val, DebugLoc::None);
let not_done = ICmp(bcx, llvm::IntNE, loaded, done_val, DebugLoc::None);
let drop_flag_neither_initialized_nor_cleared =<|fim▁hole|> Call(cx, llfn, &[], None, DebugLoc::None);
cx
})
};
let drop_flag_dtor_needed = ICmp(bcx, llvm::IntEQ, loaded, init_val, DebugLoc::None);
with_cond(bcx, drop_flag_dtor_needed, |cx| {
trans_struct_drop(cx, t, struct_data, dtor_did, class_did, substs)
})
}
/// Return the LLVM function for the destructor `did` of type `t`:
/// monomorphize local generic dtors, reuse the local item value for
/// non-generic local dtors, or declare an extern symbol for dtors from
/// other crates.
pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                              did: ast::DefId,
                              t: Ty<'tcx>,
                              parent_id: ast::DefId,
                              substs: &Substs<'tcx>)
                              -> ValueRef {
    let _icx = push_ctxt("trans_res_dtor");
    let did = inline::maybe_instantiate_inline(ccx, did);

    if !substs.types.is_empty() {
        assert_eq!(did.krate, ast::LOCAL_CRATE);

        // Since we're in trans we don't care for any region parameters
        let substs = ccx.tcx().mk_substs(Substs::erased(substs.types.clone()));

        let (val, _, _) = monomorphize::monomorphic_fn(ccx, did, substs, None);

        val
    } else if did.krate == ast::LOCAL_CRATE {
        get_item_val(ccx, did.node)
    } else {
        // External, non-generic dtor: declare it by its exported symbol.
        let tcx = ccx.tcx();
        let name = csearch::get_symbol(&ccx.sess().cstore, did);
        let class_ty = tcx.lookup_item_type(parent_id).ty.subst(tcx, substs);
        let llty = type_of_dtor(ccx, class_ty);
        let dtor_ty = ccx.tcx().mk_ctor_fn(did,
                                           &[get_drop_glue_type(ccx, t)],
                                           ccx.tcx().mk_nil());
        foreign::get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), &name[..], llvm::CCallConv,
                               llty, dtor_ty)
    }
}
/// Invoke the user-defined `Drop` impl (`dtor_did`) for the struct/enum
/// value at `v0`, then drop the fields; field drops are scheduled as
/// cleanups so they still run if the user dtor panics.
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 t: Ty<'tcx>,
                                 v0: ValueRef,
                                 dtor_did: ast::DefId,
                                 class_did: ast::DefId,
                                 substs: &subst::Substs<'tcx>)
                                 -> Block<'blk, 'tcx>
{
    debug!("trans_struct_drop t: {}", t);

    // Find and call the actual destructor
    let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t, class_did, substs);

    // Class dtors have no explicit args, so the params should
    // just consist of the environment (self).
    let params = unsafe {
        let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
        ty.element_type().func_params()
    };
    // Unsized values pass a (data, extra) fat-pointer pair, hence 2 params.
    assert_eq!(params.len(), if type_is_sized(bcx.tcx(), t) { 1 } else { 2 });

    // Be sure to put the contents into a scope so we can use an invoke
    // instruction to call the user destructor but still call the field
    // destructors if the user destructor panics.
    //
    // FIXME (#14875) panic-in-drop semantics might be unsupported; we
    // might well consider changing below to more direct code.
    let contents_scope = bcx.fcx.push_custom_cleanup_scope();

    // Issue #23611: schedule cleanup of contents, re-inspecting the
    // discriminant (if any) in case of variant swap in drop code.
    bcx.fcx.schedule_drop_adt_contents(cleanup::CustomScope(contents_scope), v0, t);

    let glue_type = get_drop_glue_type(bcx.ccx(), t);
    let dtor_ty = bcx.tcx().mk_ctor_fn(class_did, &[glue_type], bcx.tcx().mk_nil());
    let (_, bcx) = if type_is_sized(bcx.tcx(), t) {
        invoke(bcx, dtor_addr, &[v0], dtor_ty, DebugLoc::None)
    } else {
        let args = [Load(bcx, expr::get_dataptr(bcx, v0)), Load(bcx, expr::get_len(bcx, v0))];
        invoke(bcx, dtor_addr, &args, dtor_ty, DebugLoc::None)
    };

    bcx.fcx.pop_and_trans_custom_cleanup_scope(bcx, contents_scope)
}
/// Compute the dynamic (size, align) of a possibly-unsized value of type
/// `t`, given the fat-pointer extra word `info` (vtable ptr or length).
/// Falls back to static size/align for sized types.
pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
                                         -> (ValueRef, ValueRef) {
    debug!("calculate size of DST: {}; with lost info: {}",
           t, bcx.val_to_string(info));
    if type_is_sized(bcx.tcx(), t) {
        let sizing_type = sizing_type_of(bcx.ccx(), t);
        let size = C_uint(bcx.ccx(), llsize_of_alloc(bcx.ccx(), sizing_type));
        let align = C_uint(bcx.ccx(), align_of(bcx.ccx(), t));
        return (size, align);
    }
    match t.sty {
        ty::TyStruct(id, substs) => {
            let ccx = bcx.ccx();
            // First get the size of all statically known fields.
            // Don't use type_of::sizing_type_of because that expects t to be sized.
            assert!(!t.is_simd(bcx.tcx()));
            let repr = adt::represent_type(ccx, t);
            let sizing_type = adt::sizing_type_of(ccx, &*repr, true);
            let sized_size = C_uint(ccx, llsize_of_alloc(ccx, sizing_type));
            let sized_align = C_uint(ccx, llalign_of_min(ccx, sizing_type));

            // Recurse to get the size of the dynamically sized field (must be
            // the last field).
            let fields = bcx.tcx().struct_fields(id, substs);
            let last_field = fields[fields.len()-1];
            let field_ty = last_field.mt.ty;
            let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);

            // Return the sum of sizes and max of aligns.
            let size = Add(bcx, sized_size, unsized_size, DebugLoc::None);
            let align = Select(bcx,
                               ICmp(bcx,
                                    llvm::IntULT,
                                    sized_align,
                                    unsized_align,
                                    DebugLoc::None),
                               sized_align,
                               unsized_align);
            (size, align)
        }
        ty::TyTrait(..) => {
            // info points to the vtable and the second entry in the vtable is the
            // dynamic size of the object.
            let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
            let size_ptr = GEPi(bcx, info, &[1]);
            let align_ptr = GEPi(bcx, info, &[2]);
            (Load(bcx, size_ptr), Load(bcx, align_ptr))
        }
        ty::TySlice(_) | ty::TyStr => {
            let unit_ty = t.sequence_element_type(bcx.tcx());
            // The info in this case is the length of the str, so the size is that
            // times the unit size.
            let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
            let unit_align = llalign_of_min(bcx.ccx(), llunit_ty);
            let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
            (Mul(bcx, info, C_uint(bcx.ccx(), unit_size), DebugLoc::None),
             C_uint(bcx.ccx(), unit_align))
        }
        _ => bcx.sess().bug(&format!("Unexpected unsized type, found {}", t))
    }
}
/// Generate the body of a drop-glue function for `g`, dispatching on the
/// type: `Box` deallocation, struct/enum dtor invocation, trait-object
/// virtual drop, or structural recursion over the contents.
fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueKind<'tcx>)
                              -> Block<'blk, 'tcx> {
    let t = g.ty();
    let skip_dtor = match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true };
    // NB: v0 is an *alias* of type t here, not a direct value.
    let _icx = push_ctxt("make_drop_glue");

    // Only drop the value when it ... well, we used to check for
    // non-null, (and maybe we need to continue doing so), but we now
    // must definitely check for special bit-patterns corresponding to
    // the special dtor markings.
    let inttype = Type::int(bcx.ccx());
    let dropped_pattern = C_integral(inttype, adt::dtor_done_usize(bcx.fcx.ccx) as u64, false);

    match t.sty {
        ty::TyBox(content_ty) => {
            // Support for TyBox is built-in and its drop glue is
            // special. It may move to library and have Drop impl. As
            // a safe-guard, assert TyBox not used with TyContents.
            assert!(!skip_dtor);
            if !type_is_sized(bcx.tcx(), content_ty) {
                // Fat pointer: load the data word, skip if already dropped,
                // then drop the pointee and free using its dynamic size.
                let llval = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
                let llbox = Load(bcx, llval);
                let llbox_as_usize = PtrToInt(bcx, llbox, Type::int(bcx.ccx()));
                let drop_flag_not_dropped_already =
                    ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
                with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
                    let bcx = drop_ty(bcx, v0, content_ty, DebugLoc::None);
                    let info = GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]);
                    let info = Load(bcx, info);
                    let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);

                    // `Box<ZeroSizeType>` does not allocate.
                    let needs_free = ICmp(bcx,
                                          llvm::IntNE,
                                          llsize,
                                          C_uint(bcx.ccx(), 0u64),
                                          DebugLoc::None);
                    with_cond(bcx, needs_free, |bcx| {
                        trans_exchange_free_dyn(bcx, llbox, llsize, llalign, DebugLoc::None)
                    })
                })
            } else {
                let llval = v0;
                let llbox = Load(bcx, llval);
                let llbox_as_usize = PtrToInt(bcx, llbox, inttype);
                let drop_flag_not_dropped_already =
                    ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
                with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
                    let bcx = drop_ty(bcx, llbox, content_ty, DebugLoc::None);
                    trans_exchange_free_ty(bcx, llbox, content_ty, DebugLoc::None)
                })
            }
        }
        ty::TyStruct(did, substs) | ty::TyEnum(did, substs) => {
            let tcx = bcx.tcx();
            match (tcx.ty_dtor(did), skip_dtor) {
                (ty::TraitDtor(dtor, true), false) => {
                    // FIXME(16758) Since the struct is unsized, it is hard to
                    // find the drop flag (which is at the end of the struct).
                    // Lets just ignore the flag and pretend everything will be
                    // OK.
                    if type_is_sized(bcx.tcx(), t) {
                        trans_struct_drop_flag(bcx, t, v0, dtor, did, substs)
                    } else {
                        // Give the user a heads up that we are doing something
                        // stupid and dangerous.
                        bcx.sess().warn(&format!("Ignoring drop flag in destructor for {}\
                                                 because the struct is unsized. See issue\
                                                 #16758", t));
                        trans_struct_drop(bcx, t, v0, dtor, did, substs)
                    }
                }
                (ty::TraitDtor(dtor, false), false) => {
                    trans_struct_drop(bcx, t, v0, dtor, did, substs)
                }
                (ty::NoDtor, _) | (_, true) => {
                    // No dtor? Just the default case
                    iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
                }
            }
        }
        ty::TyTrait(..) => {
            // No support in vtable for distinguishing destroying with
            // versus without calling Drop::drop. Assert caller is
            // okay with always calling the Drop impl, if any.
            assert!(!skip_dtor);
            // Slot 0 of the vtable holds the destructor entry point.
            let data_ptr = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
            let vtable_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]));
            let dtor = Load(bcx, vtable_ptr);
            Call(bcx,
                 dtor,
                 &[PointerCast(bcx, Load(bcx, data_ptr), Type::i8p(bcx.ccx()))],
                 None,
                 DebugLoc::None);
            bcx
        }
        _ => {
            if bcx.fcx.type_needs_drop(t) {
                iter_structural_ty(bcx,
                                   v0,
                                   t,
                                   |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
            } else {
                bcx
            }
        }
    }
}
|
And(bcx, not_init, not_done, DebugLoc::None);
with_cond(bcx, drop_flag_neither_initialized_nor_cleared, |cx| {
let llfn = cx.ccx().get_intrinsic(&("llvm.debugtrap"));
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
logbook.testsuite
~~~~~~~~~~~~~~~~~
The logbook testsuite.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
import logbook
# Names of modules whose import failed; reported after the suite has run.
_skipped_modules = []
# Unique sentinel distinguishing "key absent" from a module stored as None.
_missing = object()
# Decorator outcomes: keep the decorated function as-is, or drop it (None).
_func_ident = lambda f: f
_func_none = lambda f: None
class LogbookTestSuite(unittest.TestSuite):
    """Test suite that, after running, reports modules whose import failed."""

    def run(self, result):
        """Run all tests, then list every module whose tests were skipped."""
        try:
            return unittest.TestSuite.run(self, result)
        finally:
            sys.stderr.write('\n')
            for name in _skipped_modules:
                sys.stderr.write('*** Failed to import %s, tests skipped.\n' % name)
class LogbookTestCase(unittest.TestCase):
    """Base class for logbook test cases; provides a ready-made logger."""
    def setUp(self):
        # Fresh named logger for each test method.
        self.log = logbook.Logger('testlogger')

# silence deprecation warning displayed on Py 3.2: `assert_` is deprecated
# there, so alias it to its modern replacement `assertTrue`.
LogbookTestCase.assert_ = LogbookTestCase.assertTrue
def make_fake_mail_handler(**kwargs):
    """Return a MailHandler subclass instance that records mails in memory.

    Instead of opening an SMTP connection, sent messages are appended to the
    handler's class-level ``mails`` list as ``(fromaddr, recipients, mail)``
    tuples. Level defaults to ERROR unless overridden via ``kwargs``.
    """
    class FakeMailHandler(logbook.MailHandler):
        # Class-level so tests can inspect it after logging.
        mails = []
        def get_connection(self):
            # The handler itself stands in for an SMTP connection object.
            return self
        def close_connection(self, con):
            pass
        def sendmail(self, fromaddr, recipients, mail):
            self.mails.append((fromaddr, recipients, mail))
    kwargs.setdefault('level', logbook.ERROR)
    return FakeMailHandler('[email protected]', ['[email protected]'], **kwargs)
def skip_if(condition):
    """Decorator factory: keep the decorated function when *condition* is
    true, otherwise replace it with ``None`` so it is skipped."""
    return _func_ident if condition else _func_none
def require(name):
    """Decorator factory: keep the test only when module *name* imports.

    A module that fails to import is remembered in ``_skipped_modules`` so
    the suite can report it later, and the decorated test is dropped.
    """
    if name not in _skipped_modules:
        try:
            __import__(name)
        except ImportError:
            _skipped_modules.append(name)
        else:
            return _func_ident
    return _func_none
def missing(name):
def decorate(f):
def wrapper(*args, **kwargs):
old = sys.modules.get(name, _missing)
sys.modules[name] = None
try:
f(*args, **kwargs)
finally:
if old is _missing:
del sys.modules[name]
else:<|fim▁hole|> return decorate
def suite():
    """Assemble the full logbook test suite.

    The context-manager tests are only added on Python >= 2.5, where the
    ``with`` statement is available.
    """
    loader = unittest.TestLoader()
    tests = LogbookTestSuite()
    tests.addTests(loader.loadTestsFromName('logbook.testsuite.test_regular'))
    if sys.version_info >= (2, 5):
        tests.addTests(loader.loadTestsFromName
                       ('logbook.testsuite.test_contextmanager'))
    return tests
if __name__ == '__main__':
    # Allow running this module directly to execute the whole suite.
    unittest.main(defaultTest='suite')
|
sys.modules[name] = old
return wrapper
|
<|file_name|>test_discovery.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from turnstile.checks import get_checks
from turnstile.manager import get_commands
# Commit-message checks that must always ship with turnstile's core.
CORE_COMMIT_MSG_CHECKS = ['branch_pattern', 'branch_release', 'branch_type', 'protect_master', 'specification']
# Manager sub-commands that must always be discoverable.
CORE_SUBCOMMANDS = ['config', 'install', 'remove', 'specification', 'upgrade', 'version']
def test_checks():
    """Every core commit-msg check must be discoverable via get_checks()."""
    discovered = dict(get_checks('commit_msg'))
    for expected in CORE_COMMIT_MSG_CHECKS:
        assert expected in discovered
<|fim▁hole|>
def test_subcommands():
    """Every core sub-command must be discoverable via get_commands()."""
    discovered = dict(get_commands())
    for expected in CORE_SUBCOMMANDS:
        assert expected in discovered
| |
<|file_name|>recovery_journal_chapters.py<|end_file_name|><|fim▁begin|>#utils
from struct import pack
from struct import unpack
def timestamp_compare(x, y):
    """cmp()-style comparator ordering journal entries by ascending
    timestamp (element 1 of each entry): returns 1, 0 or -1."""
    a, b = x[1], y[1]
    return (a > b) - (a < b)
def reverse_timestamp(x, y):
    # cmp()-style comparator for descending-timestamp ordering (counterpart
    # of timestamp_compare); assumes numeric timestamps at index 1.
    return y[1]-x[1]
class Note(object):
    """Encoder/decoder for note logs of an RTP-MIDI recovery journal
    (RFC 4695): 2-octet NoteOn logs and NoteOff OFFBITS bitfields."""

    def note_on(self, note_num, velocity, recommand=1, marker_s=0):
        """Encode a 2-octet NoteOn log (RFC 4695 A.6.3).

         0                   1
         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |S|   NOTENUM   |Y|  VELOCITY   |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        """
        # S bit in the MSB, note number (0-127, unique in the chapter) below.
        first = (marker_s << 7) | note_num
        # Y bit: 1 = recommended to play, 0 = recommended to skip.
        # Velocity is 1-127: a velocity of 0 would be coded as a NoteOff
        # bitfield instead, so it never appears here.
        second = (recommand << 7) | velocity
        return pack('!BB', first, second)

    def parse_note_on(self, note):
        """Decode a 2-octet NoteOn log; returns (S, notenum, Y, velocity)."""
        first, second = unpack('!BB', note)
        marker_s = first >> 7
        note_num = first & 127
        marker_y = second >> 7
        velocity = second & 127
        return (marker_s, note_num, marker_y, velocity)

    def note_off(self, notes, low, high):
        """Encode NoteOff OFFBITS octets for the given MIDI note numbers.

        Each OFFBITS octet codes NoteOffs for 8 consecutive MIDI note
        numbers; LOW and HIGH give the range of octets present.
        """
        # Work on a shifted copy instead of mutating the caller's list
        # (the previous in-place "+1" leaked back to the caller).
        shifted = [n + 1 for n in notes]
        nb_offbits = high - low + 1
        pack_algo = '!' + 'B' * nb_offbits
        offbits_l = []
        for n in shifted:
            # Bit position inside the octet; the MSB codes the lowest pitch.
            decallage = (8 - n % 8) % 8
            # Octet index relative to LOW. Floor division keeps this an int
            # under Python 3 (true division returned a float index).
            emplacement = (n - (low * 8) - 1) // 8
            # Grow the octet list up to the needed slot, then set the bit.
            while len(offbits_l) <= emplacement:
                offbits_l.append(0)
            offbits_l[emplacement] |= 1 << decallage
        # Pad to the advertised octet count; previously pack() raised
        # struct.error whenever the highest octets carried no NoteOff.
        while len(offbits_l) < nb_offbits:
            offbits_l.append(0)
        return pack(pack_algo, *offbits_l)

    def parse_note_off(self, notes, low, high):
        """Decode OFFBITS octets; returns a list of [0x80, notenum, 100]
        NoteOff commands (velocity fixed at 100)."""
        nb = high - low + 1
        offbits = unpack('!' + 'B' * nb, notes)
        note_l = []
        # For each octet, walk bits MSB->LSB (MSB codes the lowest pitch).
        for i, o_b in enumerate(offbits):
            for j in range(7, -1, -1):
                if o_b & (1 << j):
                    note = (8 - j + i * 8) + (low * 8)
                    # -1 undoes the +1 shift applied at encoding time.
                    note_l.append([128, note - 1, 100])
        return note_l
##
#Chapters
##
class Chapter(object):
    """Base class for RTP-MIDI recovery-journal chapters (RFC 4695).

    Subclasses maintain ``content`` (the encoded chapter bytes) and
    ``highest`` (the highest sequence number the content covers), and must
    implement ``update`` and ``trim``.
    """
    def __init__(self):
        # Encoded chapter payload; empty when there is nothing to report.
        self.content = ""
        # Highest RTP sequence number contributing to the current content.
        self.highest = 0
    def update(self, new_data):
        raise NotImplementedError
    def trim(self, new_checkpoint):
        raise NotImplementedError
class ChapterP(Chapter):
    """Chapter P: program change history (RFC 4695 A.2).

     0                   1                   2
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |S|   PROGRAM   |B|  BANK-MSB   |X|  BANK-LSB   |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    Figure A.2.1 -- Chapter P format
    """
    def __init__(self):
        Chapter.__init__(self)
        # Last program change: ((program, bank_msb, bank_lsb), seqnum).
        self.prog = ((0, 0, 0), 0)
        # X bit: controllers 0/32 were used in a non-standard way.
        self.marker_x = 0
        # B bit: a Control Change for controller 0 (bank select MSB)
        # preceded the program change.
        self.marker_b = 0

    def update(self, programs):
        """Record the most recent program change.

        `programs` is a list of ((status, data1, data2), seqnum) entries:
        either a lone program change, or bank-select controllers (0 and 32)
        accompanying the program change.
        """
        if len(programs) == 1:
            # Plain program change, no bank select.
            val = programs[0][0][1]
            seq = programs[0][1]
            self.prog = ((val, 0, 0), seq)
            # Update content and highest
            self.update_highest()
            self.build()
        else:
            # Program change with bank-select MSB/LSB controllers.
            # Initialize program: previously it was left unbound (raising
            # UnboundLocalError) when no 0xC command was in the list.
            program = 0
            msb = 0
            lsb = 0
            for i in range(len(programs)):
                if (programs[i][0][0] >> 4) == 12:
                    # 0xC.: the program change itself.
                    program = programs[i][0][1]
                elif (programs[i][0][0] >> 4) == 11:
                    # 0xB.: control change -- bank select MSB (0) or LSB (32).
                    if programs[i][0][1] == 0:
                        self.marker_b = 1
                        msb = programs[i][0][2]
                    elif programs[i][0][1] == 32:
                        lsb = programs[i][0][2]
                elif programs[i][0][0] == 0 and programs[i][0][1] == 0 \
                        and programs[i][0][2] == 0 and programs[i][1] == 0:
                    # All-zero sentinel entry flags non-standard ctrl 0/32 use.
                    self.marker_x = 1
            seq = programs[0][1]
            self.prog = ((program, msb, lsb), seq)
            # Update content and highest
            self.update_highest()
            self.build()

    def trim(self, checkpoint):
        """Forget the stored program change once `checkpoint` covers it."""
        if self.highest <= checkpoint:
            self.highest = 0
            self.content = ""
            self.prog = ((0, 0, 0), 0)
            # Update content and highest
            self.update_highest()
            self.build()

    def build(self):
        """(Re)encode `content` from the stored program state."""
        program = self.prog[0][0]
        bank_msb = self.prog[0][1]
        bank_lsb = self.prog[0][2]
        if program == 0 and bank_msb == 0 and bank_lsb == 0:
            self.content = ""
        else:
            marker_s = 1 << 7
            # Program is at most 127.
            first = marker_s | program
            # B is only set when a 0xB command for controller 0 preceded
            # the program change.
            marker_b = self.marker_b << 7
            second = marker_b | bank_msb
            marker_x = self.marker_x << 7
            third = marker_x | bank_lsb
            self.content = pack('!BBB', first, second, third)

    def parse(self, chapterp):
        """Decode a 3-octet chapter P; returns (size, midi_cmds, S, X, B)."""
        first, second, third = unpack('!BBB', chapterp)
        marker_s = first >> 7
        program = first & 127
        marker_b = second >> 7
        bank_msb = second & 127
        marker_x = third >> 7
        bank_lsb = third & 127
        midi_cmd = []
        midi_cmd.append([192, program, 0])
        if marker_b == 1:
            midi_cmd.append([176, 0, bank_msb])
            midi_cmd.append([176, 32, bank_lsb])
        # marker_x only matters when controllers 0/32 are used non-standard.
        return 3, midi_cmd, marker_s, marker_x, marker_b

    def update_highest(self):
        """Recompute `highest`: the seqnum of the stored program, if any."""
        if self.prog[0][0] != 0:
            self.highest = self.prog[1]
        else:
            self.highest = 0
class ChapterC(Chapter):
    """Chapter C: control change log list (RFC 4695 A.3).

     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 8 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |S|    LEN    |S|   NUMBER    |A|  VALUE/ALT  |S|   NUMBER    |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |A|  VALUE/ALT |               ....                            |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    Figure A.3.1 -- Chapter C format
    """
    def __init__(self):
        # NOTE(review): does not call Chapter.__init__; the same two
        # attributes are set directly here instead.
        self.highest = 0
        self.content = ""
        # Controller entry format:
        # ((number, value), packet_num, encoded_2_octet_log)
        self.controllers = []
    def header(self, length, marker_s=0):
        """Encode the 1-octet chapter header: S bit plus log count LEN."""
        marker_s = marker_s << 7
        # LEN is at most 127.
        return pack('!B', marker_s | length)
    def parse_header(self, header):
        """Decode the 1-octet header; returns (S, LEN)."""
        header = unpack('!B', header)
        marker_s = header[0] >> 7
        length = header[0]&127
        return marker_s, length
    def update(self, controllers):
        """Insert or replace controller logs.

        `controllers` entries look like ((status, number, value), seqnum);
        one log per controller number is kept, newest value wins.
        """
        for i in range(len(controllers)):
            # Controller numbers currently logged (rebuilt each pass so
            # entries appended in this call are also deduplicated).
            controllers_ind = [ controller[0][0] for controller
                                in self.controllers ]
            # data1 is the controller number, data2 its value.
            pitch = controllers[i][0][1]
            vel = controllers[i][0][2]
            seq = controllers[i][1]
            if not pitch in controllers_ind:
                encoded = self.create_log_c(0, pitch, 0,vel)
                self.controllers.append(((pitch, vel), seq, encoded))
            else:
                # Already logged: overwrite with the newest value/seqnum.
                ind = controllers_ind.index(pitch)
                encoded = self.create_log_c(0, pitch, 0,vel)
                self.controllers[ind] = ((pitch, vel), seq, encoded)
        # Update chapter and content.
        self.update_highest()
        self.build()
    def build(self):
        """Rebuild `content`: header followed by the cached encoded logs."""
        length = 0
        self.content = ""
        for controller in self.controllers:
            length += 1
            self.content += controller[2]
        header = self.header( length, 0)
        self.content = header + self.content
    def trim(self, checkpoint):
        """Drop logs already covered by `checkpoint` and re-encode."""
        if self.highest > 0:
            self.controllers = [controller for controller in self.controllers
                                if controller[1] > checkpoint]
            # Update chapter and content.
            self.update_highest()
            self.build()
    def create_log_c(self, marker_s, number, marker_a, value):
        """Encode one 2-octet controller log (S|NUMBER, A|VALUE)."""
        marker_s = marker_s << 7
        first = marker_s | number
        # TODO: marker A management (for toggle / pedal controllers).
        marker_a = marker_a << 7
        second = marker_a | value
        return pack('!BB', first, second)
    def parse_log_c(self,data):
        """Decode one 2-octet controller log; returns (S, number, A, value)."""
        first, second = unpack('!BB', data)
        marker_s = first>>7
        number = first&127
        marker_a = second>>7
        value = second&127
        return marker_s, number, marker_a, value
    def parse(self, chapter):
        """Decode a whole chapter C; returns (size, midi_cmds, S)."""
        marker_s, length = self.parse_header(chapter[:1])
        chap = chapter[1:]
        # 1 header octet plus 2 octets per log.
        size = 1 + 2 * length
        midi_cmd = []
        for i in range(length):
            current = self.parse_log_c(chap[2*i:2*i+2])
            # TODO: take the per-log S and A markers into account.
            control_cmd = [176, current[1], current[3]]
            midi_cmd.append(control_cmd)
        return size, midi_cmd, marker_s
    def update_highest(self):
        """Recompute `highest` as the max seqnum over the logged entries."""
        if len(self.controllers) > 0:
            decorate = [data[1] for data in self.controllers]
            decorate.sort(reverse=True)
            self.highest = decorate[0]
        else:
            self.highest = 0
class ChapterW(Chapter):
    """Chapter W: pitch wheel (0xE) history (RFC 4695 A.5).

     0                   1
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |S|    FIRST    |R|    SECOND   |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    Figure A.5.1 -- Chapter W format
    """
    def __init__(self):
        Chapter.__init__(self)
        # Two wheel slots, each stored as (wheel_value, seqnum).
        self.data_list = ((0, 0), (0, 0))
    def trim(self, checkpoint):
        """Clear whichever wheel slots `checkpoint` already covers."""
        if self.highest <= checkpoint:
            self.content = ""
            self.highest = 0
            self.data_list = ((0, 0), (0, 0))
        else:
            if self.data_list[0][1] <= checkpoint:
                self.data_list = ((0,0), \
                        (self.data_list[1][0], self.data_list[1][1]))
            if self.data_list[1][1] <= checkpoint:
                self.data_list = ((self.data_list[0][0], self.data_list[0][1]), \
                        (0, 0))
        # Update highest and re-encode.
        self.update_highest()
        self.build()
    def update(self, wheels):
        """Record pitch-wheel commands; entries are ((status, d1, d2), seq).

        The S bit signals that packet I's recovery codes information from
        packet I-1.
        """
        i = 0
        for wheel in wheels:
            # First entry fills slot 1, the rest slot 2
            # (TODO: how to properly distinguish wheel 1 from wheel 2).
            if i == 0:
                self.data_list = ((wheel[0][2], wheel[1]), \
                        (self.data_list[1][0], self.data_list[1][1]))
            else:
                self.data_list = ((self.data_list[0][0], self.data_list[0][1]), \
                        (wheel[0][2], wheel[1]))
            i += 1
        # Update highest and re-encode.
        self.update_highest()
        self.build()
    def build(self):
        """Encode the 2-octet chapter from the stored wheel values."""
        wheel_1 = self.data_list[0][0]
        if wheel_1 != 0:
            wheel_2 = self.data_list[1][0]
            single = 1
            mark_s = single << 7
            first = mark_s | wheel_1
            # R is reserved for future use; receivers must ignore it.
            mark_r = 0 << 7
            second = mark_r | wheel_2
            self.content = pack('!BB', first, second)
        else:
            self.content = ""
    def parse(self, chapter_w):
        """Decode a 2-octet chapter W; returns (size, midi_cmds, S)."""
        first, second = unpack('!BB', chapter_w[:2])
        midi_cmd = []
        mark_s = first&128 and 1 or 0
        wheel_1 = first&127
        wheel_2 = second&127
        # TODO: verify the pitch-bend data-byte layout (LSB currently 0).
        midi_cmd.append( [224, 0, wheel_1])
        midi_cmd.append( [224, 0, wheel_2])
        return 2, midi_cmd, mark_s
    def update_highest(self):
        """Recompute `highest` from whichever wheel slots are populated."""
        if self.data_list[0][0]!=0 :
            if self.data_list[1][0]!=0:
                if self.data_list[0][1] >= self.data_list[1][1]:
                    self.highest = self.data_list[0][1]
                else:
                    self.highest = self.data_list[1][1]
            else:
                self.highest = self.data_list[0][1]
        else:
            self.highest = 0
class ChapterN(Chapter):
    def __init__(self):
        Chapter.__init__(self)
        # Dirty flag, set to 1 whenever the note lists change.
        # (Original TODO: keep this up to date everywhere?)
        self.state = 0
        # Offbit group bounds for the header (note number // 8).
        self.low = 0
        self.high = 0
        # 1 when the encoded chapter carries an offbit (note-off) section.
        self.note_off_presence = 0
        # Pending note-on logs: ((note_val, note_vel), packet_num, encoded).
        self.note_on = []
        # Pending note-off entries: (note_val, packet_num).
        self.note_off = []
        # Encoder/decoder for the individual note logs.
        self.note = Note()
    def header(self):
        """Build the two-octet chapter N header (RFC 4695 A.6.1).

         0                   1
         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |B|     LEN     |  LOW  | HIGH  |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

        The note log list obeys oldest-first ordering.
        """
        length = len(self.note_on)
        low = self.low
        high = self.high
        # B is 0 when the chapter contains note-offs, else 1; if B == 0
        # the S bit of the enclosing level must also be 0.
        # NOTE(review): the code always sends B=0 -- presumably the B=1
        # case is unimplemented; confirm against the decoder.
        marker_b = 0 << 7
        # LEN: number of note logs in the list (up to 127 notes).
        first = marker_b | length
        # LOW and HIGH bound the OFFBITS octets: if LOW <= HIGH there are
        # HIGH - LOW + 1 OFFBITS octets; HIGH is the highest note-off
        # group, LOW the lowest.
        low = low << 4
        second = low | high
        if first > 255 or first < 0:
            print " problem with first " + str(first) + " length: " \
                + str(length)
        return pack('!BB', first, second)
def parse_header(self, header):
first, second = unpack('!BB', header)
marker_b = first&128 and 1 or 0
marker_l = first&127
low = (second&240) >> 4
high = second&15
return (marker_b, marker_l, low, high)
def eval_low_high(self):
"""
Evaluate low and high marker for note off
"""
#Getting list of noteOff => lowwest and highest noteOff
note_off = [note[0] for note in self.note_off]
#setting low and high for offbits
if len(note_off) > 0:
note_off.sort()
#set high(+1 for the trick)
if (note_off[-1]+1) % 8 == 0 :
self.high = (note_off[-1]) / 8
else:
self.high = (note_off[-1]+1) / 8
#set low
self.low = note_off[0] / 8
else:
self.low = 0
self.high = 0
def update(self, notes):
#Index of notes off
note_off = [ note[0] for note in self.note_off ]
#Splitting notes
new_note_on = [ (note[0][1], note) for note in notes
if note[0][0]&240 == 144
and note[0][2] > 0 ]
new_note_off = [ (note[0][1], note[1]) for note in notes
if note[0][0]&240 == 128
or note[0][2] == 0 ]
new_note_off_ind = [ note[0] for note in new_note_off ]
#Checking notes (note off exclusion)
new_valid_note_on = [ note[1] for note in new_note_on
if not note[0] in note_off and
not note[0] in new_note_off_ind]
#Updating note on of chapter based on new note off
self.note_on = [ note for note in self.note_on
if not note[0][0] in note_off and
not note[0][0] in new_note_off_ind ]
#Adding note on
for note_on in new_valid_note_on:
#Index of notes on
note_on_l = [ note[0][0] for note in self.note_on ]
#tmp
note_num = note_on[0][1]<|fim▁hole|> #cmd = note_on[0][0]&240
seq = note_on[1]
if note_num in note_on_l:
#Replacing Note
ind = note_on_l.index(note_num)
encoded = self.note.note_on(note_num, velocity)
self.note_on[ind] = ((note_num, velocity), seq, encoded)
self.state = 1
else:
#Add Newone
encoded = self.note.note_on(note_num, velocity)
self.note_on.append(((note_num, velocity), seq, encoded))
self.state = 1
#Adding note_off
for note_off in new_note_off:
note_off_l = [ note[0] for note in self.note_off ]
if not note_off[0] in note_off_l:
#Add note off
self.note_off.append((note_off[0], note_off[1]))
self.state = 1
else:
#Updating seq num
ind = note_off_l.index(note_off[0])
self.note_off[ind] = (note_off[0], note_off[1])
self.state = 1
#Update Highest
self.update_highest()
#Rebuilding the packet
self.build()
def trim(self, checkpoint):
if self.highest > 0:
self.note_on = [note for note in self.note_on if note[1] > checkpoint]
self.note_off = [note for note in self.note_off if note[1] > checkpoint]
self.state = 1
#Update Highest
self.update_highest()
#Rebuilding content
self.build()
    def build(self):
        """Assemble the chapter N payload: header, note-on logs, then the
        note-off OFFBITS section.

        Upstream MIDI commands are expected in the form
        ``[[Event, Note, Velocity], Time]``.
        """
        chapter_note_on = ""
        chapter_note_off = ""
        note_off_presence = 0
        # Recompute LOW/HIGH from the current note-off list first.
        self.eval_low_high()
        # Note-off part: encoded as an OFFBITS bitfield between LOW/HIGH.
        if len(self.note_off) > 0:
            note_off = [ note[0] for note in self.note_off ]
            chapter_note_off = self.note.note_off(note_off, self.low, self.high)
            note_off_presence = 1
        # Note-on part: logs were pre-encoded at update() time.
        note_on = [ note[2] for note in self.note_on ]
        chapter_note_on = ''.join(note_on)
        # Complete chapter N body: note-ons first, then offbits.
        chapter_n = chapter_note_on + chapter_note_off
        #real_len = len(self.note_on) * 2 + ( self.high - self.low + 1 )
        # Prepend the two-octet header.
        header = self.header()
        chapter_n = header + chapter_n
        # Publish the result.
        self.content = chapter_n
        self.note_off_presence = note_off_presence
    def parse(self, chapter):
        """Decode a chapter N blob.

        Returns ``(size, midi_cmd)``: the number of octets consumed and
        the recovered MIDI note commands (note-ons, status 144, followed
        by whatever the offbit decoder yields).
        """
        note = Note()
        extract_header = chapter[:2]
        size = 2
        header = self.parse_header(extract_header)
        nb_note_on = header[1]
        size += 2 * nb_note_on
        #print "total len ???? ", str(2+2*nb_note_on+)
        # Body length announced by the header: two octets per note-on log
        # plus the OFFBITS octets between LOW (header[2]) and HIGH (header[3]).
        real_len = nb_note_on * 2 + ( header[3] - header[2] + 1 )
        # NOTE(review): the +1 below over-slices by one octet; harmless
        # for reads but worth confirming against the encoder.
        extract_chapter = chapter[2:2+real_len+1]
        # Decode the note-on logs.
        note_list = []
        for i in range(nb_note_on):
            note_n = note.parse_note_on(extract_chapter[2*i:2+(i*2)])
            note_list.append([144, note_n[1], note_n[3]])
        # A note-off section is present when HIGH >= LOW and HIGH != 0.
        if header[3] - header[2] >= 0 and header[3] != 0:
            size += header[3] - header[2] + 1
            note_off = note.parse_note_off(extract_chapter[nb_note_on*2:],
                                           header[2], header[3])
        else:
            note_off = []
        return size, note_list + note_off
def update_highest(self):
#Getting higest from data list
data_list = self.note_on + self.note_off
if len(data_list) > 0:
decorate = [data[1] for data in data_list]
decorate.sort(reverse=True)
self.highest = decorate[0]
else:
self.highest = 0
class ChapterE(object):
    """Chapter E (note extras: double notes, ...).

    Placeholder -- not implemented yet.
    """
    pass
class ChapterT(Chapter):
    """Chapter T (channel aftertouch).

     0
     0 1 2 3 4 5 6 7
    +-+-+-+-+-+-+-+-+
    |S|   PRESSURE  |
    +-+-+-+-+-+-+-+-+
    Figure A.8.1 -- Chapter T format
    """
    def __init__(self):
        Chapter.__init__(self)

    def update(self, after):
        """Encode the latest channel-pressure command.

        Only the first element of ``after`` (a ``(midi_cmd, seqnum)``
        pair) is used: chapter T carries a single running value.
        """
        after = after[0]
        marker_s = 1
        pressure = after[0][1]
        self.highest = after[1]
        self.content = pack('!B', (marker_s << 7) | pressure)

    def trim(self, checkpoint):
        """Empty the chapter once its command has been acknowledged."""
        if self.highest <= checkpoint:
            self.content = ""
            self.highest = 0

    def parse(self, chap_t):
        """Decode one chapter T octet.

        Returns ``(size, midi_cmd)`` with size always 1 and a single
        channel-pressure command (status 208 = 0xD0).
        """
        # Slice (not index) so the same code works on Py2 str and Py3
        # bytes: ``chap_t[0]`` would already be an int under Python 3
        # and unpack() would reject it.
        chap_t_parsed = unpack('!B', chap_t[:1])
        #marker_s = chap_t_parsed[0] >> 7
        pressure = chap_t_parsed[0] & 127
        return 1, [[208, pressure, 0]]
class ChapterA(Chapter):
    """Chapter A (poly aftertouch).

     0                   1                   2                   3
     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |S|     LEN     |S|   NOTENUM   |X|  PRESSURE   |S|   NOTENUM   |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    |X|  PRESSURE   |             ....                              |
    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
    Figure A.9.1 -- Chapter A format
    """
    def __init__(self):
        Chapter.__init__(self)
        # One log per note:
        # ((notenum, pressure, marker_s, marker_x), seq_num, encoded)
        self.data_list = []

    def header(self, marker_s, length):
        """Build the one-octet header: S bit | LEN.

        LEN is stored as ``length - 1`` so 1..128 logs fit in 7 bits.
        The S bit is set when encoding covers packet num - 1.
        """
        return pack('!B', (marker_s << 7) | (length - 1))

    def parse_header(self, header):
        """Decode the header octet; returns ``(marker_s, length)`` with
        the LEN +1 offset undone."""
        header_parsed = unpack('!B', header)
        marker_s = header_parsed[0] >> 7
        length = (header_parsed[0] & 127) + 1
        return marker_s, length

    def create_log_a(self, marker_s, notenum, marker_x, pressure):
        """Encode one log as S|NOTENUM then X|PRESSURE.

        X == 1 when the encoded command appears before a MIDI Control
        Change 123-127 (All Notes Off semantics) or 120 (All Sound Off)
        in the session history.
        """
        first = (marker_s << 7) | notenum
        second = (marker_x << 7) | pressure
        return pack('!BB', first, second)

    def parse_log_a(self, log_a):
        """Decode one two-octet log into
        ``(marker_s, notenum, marker_x, pressure)``."""
        first, second = unpack('!BB', log_a)
        return first >> 7, first & 127, second >> 7, second & 127

    def update(self, midi_cmd):
        """Insert or replace aftertouch logs.

        Each item of ``midi_cmd`` is ``[[status, notenum, pressure], flags]``
        where bit 1 of ``flags`` drives the S marker and bit 0 the X
        marker.  NOTE(review): ``flags`` is also stored as the log's
        sequence number -- confirm upstream callers really pack both
        meanings into that field.
        """
        known_pitch = [data[0][0] for data in self.data_list]
        for cmd, flags in midi_cmd:
            marker_s = 1 if flags >> 1 else 0
            marker_x = flags & 1
            encoded = self.create_log_a(marker_s, cmd[1], marker_x, cmd[2])
            entry = ((cmd[1], cmd[2], marker_s, marker_x), flags, encoded)
            if cmd[1] not in known_pitch:
                # New note number: append its log.
                self.data_list.append(entry)
            else:
                # Known note number: replace its log in place.
                self.data_list[known_pitch.index(cmd[1])] = entry
            known_pitch = [data[0][0] for data in self.data_list]
        self.update_highest()
        self.build()

    def build(self):
        """Re-encode the chapter: header followed by every log."""
        body = "".join(data[2] for data in self.data_list)
        # NOTE(review): the header S bit is hard-coded to 1 here;
        # presumably refined by the enclosing journal -- confirm.
        header = self.header(1, len(self.data_list))
        self.content = header + body

    def trim(self, checkpoint):
        """Drop logs acknowledged up to ``checkpoint``; empty the chapter
        when nothing is left."""
        self.data_list = [data for data in self.data_list
                          if data[1] > checkpoint]
        if self.data_list:
            self.update_highest()
            self.build()
        else:
            self.content = ""
            self.highest = 0

    def update_highest(self):
        """Refresh ``self.highest`` with the newest sequence number.

        BUG FIX: the old code reverse-sorted the seqnums and then read
        index [1] -- the *second* newest -- and raised IndexError for a
        single-log list.  Use max(), consistent with the other chapters.
        """
        if self.data_list:
            self.highest = max(data[1] for data in self.data_list)
        else:
            self.highest = 0

    def parse(self, chapter_a):
        """Decode a chapter A blob.

        Returns ``(size, marker_s, midi_cmd)`` with poly-aftertouch
        commands (status 160 = 0xA0, channel 0).
        """
        marker_s, length = self.parse_header(chapter_a[:1])
        midi_cmd = []
        size = 1
        logs = chapter_a[1:2 * length + 1]
        for i in range(length):
            # TODO: honour the per-log S and X markers.
            _s, notenum, _x, pressure = \
                self.parse_log_a(logs[2 * i:2 * i + 2])
            midi_cmd.append([160, notenum, pressure])
            size += 2
        return size, marker_s, midi_cmd
|
velocity = note_on[0][2]
|
<|file_name|>0003_auto_20141104_2232.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
<|fim▁hole|>
class Migration(migrations.Migration):
    """Auto-generated: add the ``Tag`` model and an ``Article.tags``
    many-to-many field (related_name ``articles``)."""

    dependencies = [
        ('blog', '0002_auto_20141029_1945'),
    ]
    operations = [
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
                ('name', models.CharField(max_length=32)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='article',
            name='tags',
            field=models.ManyToManyField(related_name='articles', to='blog.Tag', blank=True),
            preserve_default=True,
        ),
    ]
| |
<|file_name|>app.module.js<|end_file_name|><|fim▁begin|>"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
//Core
require('../../rxjs-extensions');
var core_1 = require('@angular/core');
var platform_browser_1 = require('@angular/platform-browser');
var forms_1 = require('@angular/forms');
var http_1 = require('@angular/http');
var app_routing_1 = require('./app.routing');
//Services
//Common Components
var page_footer_component_1 = require('../userinterface/page-footer.component');
var page_header_component_1 = require('../userinterface/page-header.component');
//Module Components
var app_component_1 = require('./app.component');
var home_content_component_1 = require('./home-content.component');
var AppModule = (function () {
function AppModule() {
}
AppModule = __decorate([
core_1.NgModule({
imports: [
platform_browser_1.BrowserModule,
forms_1.FormsModule,
forms_1.ReactiveFormsModule,
http_1.HttpModule,
app_routing_1.routing
],
declarations: [
app_component_1.AppComponent,
page_footer_component_1.PageFooterComponent,
page_header_component_1.PageHeaderComponent,
home_content_component_1.HomeContentComponent
],
bootstrap: [app_component_1.AppComponent]
}), <|fim▁hole|> ], AppModule);
return AppModule;
}());
exports.AppModule = AppModule;
//# sourceMappingURL=app.module.js.map<|fim▁end|>
|
__metadata('design:paramtypes', [])
|
<|file_name|>croc-hunter.go<|end_file_name|><|fim▁begin|>// The infamous "croc-hunter" game as featured at many a demo
package main
import (
"flag"
"fmt"
"log"
"net/http"
"os"
)
// main parses flags, registers the HTTP routes and serves forever.
func main() {
	port := flag.String("port", "8080", "HTTP Listen address.")
	flag.Parse()
	log.Println("Starting server...")
	// Game page handler at the root.
	http.HandleFunc("/", handler)
	// Static assets (JS, CSS, icons) under /static/.
	staticFiles := http.FileServer(http.Dir("static/"))
	http.Handle("/static/", http.StripPrefix("/static/", staticFiles))
	log.Println("Server started. Listening on port " + *port)
	log.Fatal(http.ListenAndServe(":"+*port, nil))
}
const (
html = `
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>Croc Hunter Arcade Game</title>
<link rel='stylesheet' href='/static/game.css'/>
<link rel="icon" type="image/png" href="/static/favicon-16x16.png" sizes="16x16" />
<link rel="icon" type="image/png" href="/static/favicon-32x32.png" sizes="32x32" />
</head>
<body>
<canvas id="canvasBg" width="800" height="490" ></canvas>
<canvas id="canvasEnemy" width="800" height="500" ></canvas>
<canvas id="canvasJet" width="800" height="500" ></canvas>
<canvas id="canvasHud" width="800" height="500" ></canvas>
<script src='/static/game2.js'></script>
<div class="details">
<strong>Hostname: </strong>%s<br>
<strong>Release: </strong>%s<br>
<strong>Commit: </strong>%s<br>
<strong>Powered By: </strong>%s<br>
</div>
</body>
</html>
`
)
func handler(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/healthz" {
w.WriteHeader(http.StatusOK)
return
}
hostname, err := os.Hostname()
if err != nil {
log.Fatalf("could not get hostname: %s", err)
}
release := os.Getenv("WORKFLOW_RELEASE")
commit := os.Getenv("GIT_SHA")
powered := os.Getenv("POWERED_BY")
if release == "" {
release = "unknown"
}
if commit == "" {
commit = "not present"
}
if powered == "" {
powered = "SysEleven"
}
<|fim▁hole|><|fim▁end|>
|
fmt.Fprintf(w, html, hostname, release, commit, powered)
}
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// src/cpu/mod.rs
// Copyright 2016 Alexis Williams
//
// Licensed under the MIT License <http://opensource.org/licenses/MIT>.
// CPU-specific modules
#[cfg(all(target_arch = "thumbv7a", feature = "cpu-am335x"))]
pub use self::am335x::*;
#[macro_use]<|fim▁hole|><|fim▁end|>
|
pub mod generic;
pub mod am335x;
|
<|file_name|>interfaceorg_1_1onosproject_1_1net_1_1behaviour_1_1PortAdmin.js<|end_file_name|><|fim▁begin|>var interfaceorg_1_1onosproject_1_1net_1_1behaviour_1_1PortAdmin =<|fim▁hole|>];<|fim▁end|>
|
[
[ "enable", "interfaceorg_1_1onosproject_1_1net_1_1behaviour_1_1PortAdmin.html#a4fa39b1dc477558945e9211986dabcf3", null ]
|
<|file_name|>test_utils_request.py<|end_file_name|><|fim▁begin|>import unittest
from scrapy.http import Request
from scrapy.utils.request import request_fingerprint, _fingerprint_cache, \
request_authenticate, request_httprepr
class UtilsRequestTest(unittest.TestCase):
def test_request_fingerprint(self):
r1 = Request("http://www.example.com/query?id=111&cat=222")
r2 = Request("http://www.example.com/query?cat=222&id=111")
self.assertEqual(request_fingerprint(r1), request_fingerprint(r1))
self.assertEqual(request_fingerprint(r1), request_fingerprint(r2))
r1 = Request('http://www.example.com/hnnoticiaj1.aspx?78132,199')
r2 = Request('http://www.example.com/hnnoticiaj1.aspx?78160,199')
self.assertNotEqual(request_fingerprint(r1), request_fingerprint(r2))
# make sure caching is working
self.assertEqual(request_fingerprint(r1), _fingerprint_cache[r1][(None, False)])
r1 = Request("http://www.example.com/members/offers.html")
r2 = Request("http://www.example.com/members/offers.html")
r2.headers['SESSIONID'] = b"somehash"
self.assertEqual(request_fingerprint(r1), request_fingerprint(r2))
r1 = Request("http://www.example.com/")
r2 = Request("http://www.example.com/")
r2.headers['Accept-Language'] = b'en'
r3 = Request("http://www.example.com/")
r3.headers['Accept-Language'] = b'en'
r3.headers['SESSIONID'] = b"somehash"
self.assertEqual(request_fingerprint(r1), request_fingerprint(r2), request_fingerprint(r3))
self.assertEqual(request_fingerprint(r1),
request_fingerprint(r1, include_headers=['Accept-Language']))
self.assertNotEqual(request_fingerprint(r1),
request_fingerprint(r2, include_headers=['Accept-Language']))
self.assertEqual(request_fingerprint(r3, include_headers=['accept-language', 'sessionid']),
request_fingerprint(r3, include_headers=['SESSIONID', 'Accept-Language']))
r1 = Request("http://www.example.com/test.html")
r2 = Request("http://www.example.com/test.html#fragment")
self.assertEqual(request_fingerprint(r1), request_fingerprint(r2))
self.assertEqual(request_fingerprint(r1), request_fingerprint(r1, keep_fragments=True))
self.assertNotEqual(request_fingerprint(r2), request_fingerprint(r2, keep_fragments=True))
self.assertNotEqual(request_fingerprint(r1), request_fingerprint(r2, keep_fragments=True))
r1 = Request("http://www.example.com")
r2 = Request("http://www.example.com", method='POST')
r3 = Request("http://www.example.com", method='POST', body=b'request body')
self.assertNotEqual(request_fingerprint(r1), request_fingerprint(r2))
self.assertNotEqual(request_fingerprint(r2), request_fingerprint(r3))
# cached fingerprint must be cleared on request copy
r1 = Request("http://www.example.com")
fp1 = request_fingerprint(r1)
r2 = r1.replace(url="http://www.example.com/other")
fp2 = request_fingerprint(r2)
self.assertNotEqual(fp1, fp2)
    def test_request_authenticate(self):
        """request_authenticate sets a Basic auth header with the
        base64-encoded ``user:pass`` pair."""
        r = Request("http://www.example.com")
        request_authenticate(r, 'someuser', 'somepass')
        # b64("someuser:somepass") == "c29tZXVzZXI6c29tZXBhc3M="
        self.assertEqual(r.headers['Authorization'], b'Basic c29tZXVzZXI6c29tZXBhc3M=')
def test_request_httprepr(self):
r1 = Request("http://www.example.com")
self.assertEqual(request_httprepr(r1), b'GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n')
r1 = Request("http://www.example.com/some/page.html?arg=1")
self.assertEqual(request_httprepr(r1), b'GET /some/page.html?arg=1 HTTP/1.1\r\nHost: www.example.com\r\n\r\n')
r1 = Request("http://www.example.com", method='POST', headers={"Content-type": b"text/html"}, body=b"Some body")
self.assertEqual(request_httprepr(r1), b'POST / HTTP/1.1\r\nHost: www.example.com\r\nContent-Type: text/html\r\n\r\nSome body')<|fim▁hole|> # the representation is not important but it must not fail.
request_httprepr(Request("file:///tmp/foo.txt"))
request_httprepr(Request("ftp://localhost/tmp/foo.txt"))
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
def test_request_httprepr_for_non_http_request(self):
|
<|file_name|>cmd_resp.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::error;
use crate::Error;
use kvproto::raft_cmdpb::RaftCmdResponse;
/// Record `term` in the response header; 0 is treated as "unset" and ignored.
pub fn bind_term(resp: &mut RaftCmdResponse, term: u64) {
    if term != 0 {
        resp.mut_header().set_current_term(term);
    }
}
/// Attach `err` to the response header, converting it to its protobuf form.
pub fn bind_error(resp: &mut RaftCmdResponse, err: Error) {
    resp.mut_header().set_error(err.into());
}
/// Build a fresh `RaftCmdResponse` carrying `err` in its header.
pub fn new_error(err: Error) -> RaftCmdResponse {
    let mut response = RaftCmdResponse::default();
    bind_error(&mut response, err);
    response
}
/// Build an error response stamped with the given `term`.
pub fn err_resp(e: Error, term: u64) -> RaftCmdResponse {
    let mut response = new_error(e);
    bind_term(&mut response, term);
    response
}
pub fn message_error<E>(err: E) -> RaftCmdResponse<|fim▁hole|>where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
new_error(Error::Other(err.into()))
}<|fim▁end|>
| |
<|file_name|>test_transmuters.py<|end_file_name|><|fim▁begin|># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import warnings
import unittest
import os
from pymatgen.alchemy.transmuters import CifTransmuter, PoscarTransmuter
from pymatgen.alchemy.filters import ContainsSpecieFilter
from pymatgen.transformations.standard_transformations import \
SubstitutionTransformation, RemoveSpeciesTransformation, \
OrderDisorderedStructureTransformation
from pymatgen.transformations.advanced_transformations import \
SuperTransformation
'''
Created on Mar 5, 2012
'''
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Mar 5, 2012"
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')<|fim▁hole|>
class CifTransmuterTest(unittest.TestCase):
    """CifTransmuter: apply transformations to every structure read from
    a multi-structure CIF file."""

    def setUp(self):
        # Silence pymatgen parsing warnings during the test run.
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.simplefilter("default")

    def test_init(self):
        # List literal instead of the old empty-list-plus-append idiom.
        trans = [SubstitutionTransformation({"Fe": "Mn", "Fe2+": "Mn2+"})]
        tsc = CifTransmuter.from_filenames([os.path.join(test_dir,
                                                         "MultiStructure.cif")],
                                           trans)
        self.assertEqual(len(tsc), 2)
        # Set literals instead of set([...]) calls.
        expected_ans = {"Mn", "O", "Li", "P"}
        for s in tsc:
            els = {el.symbol
                   for el in s.final_structure.composition.elements}
            self.assertEqual(expected_ans, els)
class PoscarTransmuterTest(unittest.TestCase):
    """PoscarTransmuter: transformations, filters, parameters and tags on
    structures read from POSCAR files."""

    def test_init(self):
        # List literal instead of the old empty-list-plus-append idiom.
        trans = [SubstitutionTransformation({"Fe": "Mn"})]
        tsc = PoscarTransmuter.from_filenames([os.path.join(test_dir,
                                                            "POSCAR"),
                                               os.path.join(test_dir,
                                                            "POSCAR")],
                                              trans)
        self.assertEqual(len(tsc), 2)
        # Set literals instead of set([...]) calls.
        expected_ans = {"Mn", "O", "P"}
        for s in tsc:
            els = {el.symbol
                   for el in s.final_structure.composition.elements}
            self.assertEqual(expected_ans, els)

    def test_transmuter(self):
        tsc = PoscarTransmuter.from_filenames(
            [os.path.join(test_dir, "POSCAR")])
        tsc.append_transformation(RemoveSpeciesTransformation('O'))
        self.assertEqual(len(tsc[0].final_structure), 8)

        tsc.append_transformation(SubstitutionTransformation({"Fe":
                                                              {"Fe2+": 0.25,
                                                               "Mn3+": .75},
                                                              "P": "P5+"}))
        tsc.append_transformation(OrderDisorderedStructureTransformation(),
                                  extend_collection=50)
        self.assertEqual(len(tsc), 4)

        t = SuperTransformation([SubstitutionTransformation({"Fe2+": "Mg2+"}),
                                 SubstitutionTransformation({"Fe2+": "Zn2+"}),
                                 SubstitutionTransformation({"Fe2+": "Be2+"})])
        tsc.append_transformation(t, extend_collection=True)
        self.assertEqual(len(tsc), 12)
        for x in tsc:
            # 4 transformations + the starting structure.
            self.assertEqual(len(x), 5, 'something might be wrong with the number of transformations in the history')

        # test the filter
        tsc.apply_filter(ContainsSpecieFilter(['Zn2+', 'Be2+', 'Mn4+'],
                                              strict_compare=True, AND=False))
        self.assertEqual(len(tsc), 8)
        self.assertEqual(tsc.transformed_structures[0].as_dict()[
            'history'][-1]['@class'], 'ContainsSpecieFilter')

        tsc.apply_filter(ContainsSpecieFilter(['Be2+']))
        self.assertEqual(len(tsc), 4)

        # Test set_parameter and add_tag.
        tsc.set_parameter("para1", "hello")
        self.assertEqual(tsc.transformed_structures[0]
                         .as_dict()['other_parameters']['para1'], 'hello')
        tsc.add_tags(["world", "universe"])
        self.assertEqual(tsc.transformed_structures[0]
                         .as_dict()['other_parameters']['tags'],
                         ["world", "universe"])
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import logging
import os
import shlex
import unittest
import sys
from toil.common import toilPackageDirPath
from toil.lib.bioio import getBasicOptionParser, parseSuiteTestOptions
log = logging.getLogger(__name__)
class ToilTest(unittest.TestCase):
"""
A common base class for our tests. Please have every test case directly or indirectly inherit this one.
"""
orig_sys_argv = None
    def getScriptPath(self, script_name):
        # Resolve the absolute path of a Toil utility script shipped in
        # toil/utils, given its module name without the .py extension.
        return os.path.join(toilPackageDirPath(), 'utils', script_name + '.py')
    @classmethod
    def setUpClass(cls):
        """Stash ``sys.argv[1:]`` and replace it with the contents of the
        TOIL_TEST_ARGS environment variable so the suite-level option
        parser sees them; the leftover positional args are put back into
        ``sys.argv`` for the test framework."""
        super(ToilTest, cls).setUpClass()
        cls.orig_sys_argv = sys.argv[1:]
        sys.argv[1:] = shlex.split(os.environ.get('TOIL_TEST_ARGS', ""))
        parser = getBasicOptionParser()
        options, args = parseSuiteTestOptions(parser)
        sys.argv[1:] = args
    @classmethod
    def tearDownClass(cls):
        # Restore the sys.argv that setUpClass stashed away.
        sys.argv[1:] = cls.orig_sys_argv
        super(ToilTest, cls).tearDownClass()
    def setUp(self):
        # Log the test id so interleaved log output can be attributed.
        log.info("Setting up %s", self.id())
        super(ToilTest, self).setUp()
def tearDown(self):<|fim▁hole|><|fim▁end|>
|
super(ToilTest, self).tearDown()
log.info("Tearing down down %s", self.id())
|
<|file_name|>tokeniser.rs<|end_file_name|><|fim▁begin|>//! The _Tokeniser_ class.
#![experimental]
use std::char::is_whitespace;
use escape_scheme::EscapeScheme;
/// A tokeniser object.
///
/// A Tokeniser can be fed characters from an iterator, string, or individually.
/// It is an _immutable_ object: actions on a Tokeniser consume the Tokeniser,
/// and produce a fresh copy of the Tokeniser.
///
/// At any stage, a Tokeniser can be consumed to produce the vector of words
/// it has read, using the `into_strings` method. This method may fail if the
/// Tokeniser ended in a bad state (in the middle of a quoted string, or in
/// the middle of an escape sequence).
#[deriving(Clone)]
pub struct Tokeniser<Q, E, S> {
/// The current vector of parsed words.
vec: Vec<String>,
/// The current tokeniser error, if any.
/// An error ‘poisons’ the tokeniser, causing it to ignore any further
/// input.
error: Option<Error>,
/// Whether or not we are currently in a word.
in_word: bool,
/// The current closing quote character and quote mode, if any.
quote: Option<( char, QuoteMode )>,
/// The current escape scheme in use, if any.
escape: Option<S>,
/// Maps from quote openers to quote closers.
quote_map: Q,
/// Map from escape leader characters to their schemes.
escape_map: E,
}
/// A quote mode.
#[deriving(Clone)]
pub enum QuoteMode {
/// All characters except the closing character have their literal value.
/// This is equivalent to single-quoting in POSIX shell.
IgnoreEscapes,
/// All characters except the closing character and escape sequences
/// have their literal value. This is roughly equivalent to
/// double-quoting in POSIX shell.
ParseEscapes
}
/// A tokeniser error.
///
/// A Tokeniser's `into_strings` method can fail with one of the following
/// errors if called while the Tokeniser is in an unfinished state.
#[deriving(Clone, Eq, PartialEq, Show)]
pub enum Error {
/// A quotation was opened, but not closed.
UnmatchedQuote,
/// An escape sequence was started, but not finished.
UnfinishedEscape,
/// An unknown escape sequence was encountered.
BadEscape
}
impl<Q, E, S> Tokeniser<Q, E, S>
where Q: Map<char, ( char, QuoteMode )>,
E: Map<char, S>,
S: EscapeScheme,
Q: Clone,
E: Clone,
S: Clone,
Q: Collection {
/// Creates a new, blank Tokeniser.
///
/// # Arguments<|fim▁hole|> /// * `escape_map` - A map, mapping escape leader characters to their escape
/// schemes. An empty map disables escapes.
///
/// # Return value
///
/// A new Tokeniser, with an empty state. Attempting to take the
/// string vector of the Tokeniser yields the empty vector.
///
/// # Example
///
/// ```rust
/// use std::collections::hashmap::HashMap;
/// use russet::{ Tokeniser, ParseEscapes, QuoteMode };
/// use russet::{ MapEscape, SimpleEscapeScheme };
///
/// let quote_map: HashMap<char, ( char, QuoteMode )> =
/// vec![ ( '\"', ( '\"', ParseEscapes ) ) ].move_iter().collect();
/// let escape_pairs: HashMap<char, char> =
/// vec![ ( 'n', '\n' ) ].move_iter().collect();
/// let escape_map: HashMap<char, SimpleEscapeScheme<HashMap<char, char>>> =
/// vec![ ( '\\', MapEscape(escape_pairs) )].move_iter().collect();
/// let tok = Tokeniser::new(quote_map, escape_map);
/// assert_eq!(tok.into_strings(), Ok(vec![]));
/// ```
    pub fn new(quote_map: Q, escape_map: E) -> Tokeniser<Q, E, S> {
        Tokeniser {
            // Start with a single empty word buffer; `in_word` stays
            // false until a character is actually emitted into it.
            vec: vec![ String::new() ],
            error: None,
            in_word: false,
            quote: None,
            escape: None,
            quote_map: quote_map,
            escape_map: escape_map
        }
    }
/// Feeds a single character `chr` to a Tokeniser.
///
/// # Return value
///
/// A new Tokeniser, representing the state of the Tokeniser after
/// consuming `chr`.
///
/// # Example
///
/// ```rust
/// use russet::whitespace_split_tokeniser;
///
/// let tok = whitespace_split_tokeniser();
/// let tok2 = tok.add_char('a').add_char('b').add_char('c');
/// assert_eq!(tok2.into_strings(), Ok(vec![ "abc".into_string() ]));
/// ```
pub fn add_char(self, chr: char) -> Tokeniser<Q, E, S> {
let mut new = self.clone();
match (chr, self) {
// ERROR
// Found an error
// -> Ignore input
( _, Tokeniser { error: Some(_), .. } ) => (),
// ESCAPE SEQUENCES
// Currently escaping
// -> Escape via escape scheme.
( c, Tokeniser { escape: Some(s), .. } ) => match s.escape(c) {
Some(cc) => new.emit(cc),
None => { new.error = Some(BadEscape); }
},
// ESCAPE LEADER
// Escape leader, not in quotes
// -> Begin escape (and word if not in one already)
( c, Tokeniser { escape: None,
quote: None,
escape_map: ref e, .. } ) if e.contains_key(&c) =>
new.start_escaping(c),
// Escape leader, in escape-permitting quotes
// -> Begin escape (and word if not in one already)
( c, Tokeniser { escape: None,
quote: Some(( _, ParseEscapes )),
escape_map: ref e, .. } ) if e.contains_key(&c) =>
new.start_escaping(c),
// QUOTE OPENING
// Quote opening character, not currently in quoted word
// -> Start quoting
( c, Tokeniser { escape: None, quote: None, quote_map: ref q, .. } )
if q.contains_key(&c) => {
new.quote = Some(q.find(&c).unwrap().clone());
new.in_word = true;
},
// QUOTE CLOSING
// Quote closing character, in quoted word, quotes ok
// -> Stop quoting
( c, Tokeniser { escape: None, quote: Some(( cc, _ )), .. } )
if c == cc => {
new.quote = None;
new.in_word = true;
},
// UNESCAPED WHITESPACE
// Unescaped whitespace, while not in a word
// -> Ignore
( a, Tokeniser { escape: None, in_word: false, .. } )
if is_whitespace(a) => (),
// Unescaped whitespace, while in a non-quoted word
// -> End word
( a, Tokeniser { escape: None, in_word: true, quote: None, .. } )
if is_whitespace(a) => {
new.in_word = false;
new.vec.push(String::new());
},
// DEFAULT
// Anything else
// -> Echo
( a, _ ) => new.emit(a)
}
new
}
    /// Feeds an Iterator of chars, `it`, into the Tokeniser.
    ///
    /// # Return value
    ///
    /// A new Tokeniser, representing the state of the Tokeniser after
    /// consuming the characters in `it`.
    pub fn add_iter<I: Iterator<char>>(self, mut it: I) -> Tokeniser<Q, E, S> {
        // Thread the tokeniser through the fold: each char consumes the
        // previous state and produces the next one.
        it.fold(self, |s, chr| s.add_char(chr))
    }
/// Runs every character of `string` through the Tokeniser.
///
/// # Return value
///
/// The Tokeniser state reached after consuming all of `string`.
pub fn add_string(self, string: &str) -> Tokeniser<Q, E, S> {
    let chars = string.chars();
    self.add_iter(chars)
}
/// Feeds a single line into the Tokeniser.  The only difference from
/// `add_string` is that surrounding whitespace is trimmed off first.
///
/// # Return value
///
/// The Tokeniser state reached after consuming the trimmed line.
pub fn add_line(self, line: &str) -> Tokeniser<Q, E, S> {
    let trimmed = line.trim();
    self.add_string(trimmed)
}
/// Consumes the Tokeniser and extracts the tokenised strings.
///
/// # Return value
///
/// `Ok` with the accumulated string vector when the Tokeniser ended in
/// a clean state; otherwise the first applicable `Err`: a previously
/// latched error, an unterminated quote, or a dangling escape.
pub fn into_strings(mut self) -> Result<Vec<String>, Error> {
    // Guard clauses, checked in priority order: a latched error wins
    // over the quote/escape end-state diagnostics.
    if self.error.is_some() {
        return Err(self.error.unwrap());
    }
    if self.in_word && self.quote.is_some() {
        return Err(UnmatchedQuote);
    }
    if self.escape.is_some() {
        return Err(UnfinishedEscape);
    }
    self.drop_empty_current_string();
    Ok(self.vec)
}
/// Appends `c` to the Tokeniser's current (last) working string.
///
/// Emitting a character always marks the Tokeniser as being inside a
/// word and terminates any in-progress escape sequence.
fn emit(&mut self, c: char) {
    self.in_word = true;
    self.escape = None;
    // Mutate the last string of the vector in place, appending `c`.
    self.vec.mut_last().mutate(|s| { s.push_char(c); s });
}
/// Enters escape mode for the escape-leader character `c`, installing
/// the escape scheme looked up in the escape map.  Starting an escape
/// also puts the Tokeniser inside a word, if it wasn't already.
fn start_escaping(&mut self, c: char) {
    self.in_word = true;
    self.escape = self.escape_map.find(&c).map(|a| a.clone());
}
/// Removes the working string at the tail of the vector when it is
/// still empty, so no spurious empty token is reported.
fn drop_empty_current_string(&mut self) {
    let tail_is_empty = self.vec.last().map(|s| s.is_empty()).unwrap_or(false);
    if tail_is_empty {
        self.vec.pop();
    }
}
}<|fim▁end|>
|
///
/// * `quote_map` - A map, mapping characters that serve as opening quotes
/// to their closing quotes and quote modes.
|
'''Trains a simple convnet on the Fashion MNIST dataset.

Gets to % test accuracy after 12 epochs
(there is still a lot of margin for parameter tuning).
'''
# NOTE(review): this file was polluted with fill-in-the-middle dump
# markers (<|file_name|>, <|fim_hole|>, <|fim_end|>) that made it
# unrunnable; they are removed here and the held-out line
# (x_test reshape for the channels_first branch) is restored in place.
from __future__ import print_function

import keras
from keras.datasets import fashion_mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K

# Training hyperparameters.
batch_size = 128
num_classes = 10
epochs = 12

# input image dimensions
img_rows, img_cols = 28, 28

# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()

# Add the single grayscale channel axis in whichever position the
# backend expects (NCHW vs NHWC).
if K.image_data_format() == 'channels_first':
    x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)
    x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
    x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)

# Scale pixel values from [0, 255] to [0, 1].
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 3),
                 activation='relu',
                 input_shape=input_shape))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))

model.compile(loss=keras.losses.categorical_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])

model.fit(x_train, y_train,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(x_test, y_test))
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
<|file_name|>bitcoin_id_ID.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="id_ID" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About minty</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>minty</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The minty developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Klik-ganda untuk mengubah alamat atau label</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Buat alamat baru</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Salin alamat yang dipilih ke clipboard</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your minty addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation>&Salin Alamat</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a minty address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified minty address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Hapus</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation>Salin &Label</translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation>&Ubah</translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>File CSV (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(tidak ada label)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Dialog Kata kunci</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Masukkan kata kunci</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Kata kunci baru</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Ulangi kata kunci baru</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Masukkan kata kunci baru ke dompet.<br/>Mohon gunakan kata kunci dengan <b>10 karakter atau lebih dengan acak</b>, atau <b>delapan kata atau lebih</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Enkripsi dompet</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Operasi ini memerlukan kata kunci dompet Anda untuk membuka dompet ini.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Buka dompet</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Operasi ini memerlukan kata kunci dompet Anda untuk mendekripsi dompet ini.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dekripsi dompet</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Ubah kata kunci</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Masukkan kata kunci lama dan baru ke dompet ini.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Konfirmasi enkripsi dompet</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Dompet terenkripsi</translation>
</message>
<message>
<location line="-58"/>
<source>minty will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Enkripsi dompet gagal</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Enkripsi dompet gagal karena kesalahan internal. Dompet Anda tidak dienkripsi.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Kata kunci yang dimasukkan tidak cocok.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Gagal buka dompet</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Kata kunci yang dimasukkan untuk dekripsi dompet tidak cocok.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Dekripsi dompet gagal</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation>Pesan &penanda...</translation>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Sinkronisasi dengan jaringan...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Kilasan</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Tampilkan kilasan umum dari dompet</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transaksi</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Jelajah sejarah transaksi</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>K&eluar</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Keluar dari aplikasi</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about minty</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Mengenai &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Tampilkan informasi mengenai Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Pilihan...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
        <translation>&Enkripsi Dompet...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Cadangkan Dompet...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Ubah Kata Kunci...</translation>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a minty address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for minty</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation>Cadangkan dompet ke lokasi lain</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ubah kata kunci yang digunakan untuk enkripsi dompet</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation>&Jendela Debug</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Buka konsol debug dan diagnosa</translation>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation>&Verifikasi pesan...</translation>
</message>
<message>
<location line="-202"/>
<source>minty</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Dompet</translation>
</message>
<message>
<location line="+180"/>
<source>&About minty</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Berkas</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Pengaturan</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Bantuan</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Baris tab</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>minty client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to minty network</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About minty card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about minty card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Terbaru</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Menyusul...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Transaksi terkirim</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Transaksi diterima</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Tanggal: %1
Jumlah: %2
Jenis: %3
Alamat: %4
</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid minty address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Dompet saat ini <b>terenkripsi</b> dan <b>terbuka</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Dompet saat ini <b>terenkripsi</b> dan <b>terkunci</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. minty can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation>Notifikasi Jaringan</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Jumlah:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Jumlah</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Tanggal</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Terkonfirmasi</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Salin alamat</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Salin label</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Salin jumlah</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(tidak ada label)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Ubah Alamat</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Label</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Alamat</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Alamat menerima baru</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Alamat mengirim baru</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Ubah alamat menerima</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Ubah alamat mengirim</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Alamat yang dimasukkan "%1" sudah ada di dalam buku alamat.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid minty address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Tidak dapat membuka dompet.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Pembuatan kunci baru gagal.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>minty-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Pilihan</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Utama</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Bayar &biaya transaksi</translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start minty after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start minty on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation>&Jaringan</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the minty client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Petakan port dengan &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the minty network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
        <translation>&IP Proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port proxy (cth. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>Versi &SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Versi SOCKS proxy (cth. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Jendela</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
        <translation>Hanya tampilkan ikon tray setelah meminimalkan jendela.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Meminilisasi ke tray daripada taskbar</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&eminilisasi saat tutup</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Tampilan</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Bahasa Antarmuka Pengguna:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting minty.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unit untuk menunjukkan jumlah:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show minty addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Tampilkan alamat dalam daftar transaksi</translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&YA</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Batal</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation>standar</translation>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting minty.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Alamat proxy yang diisi tidak valid.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulir</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the minty network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Dompet</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transaksi sebelumnya</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation>tidak tersinkron</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nama Klien</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation>T/S</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versi Klien</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informasi</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Waktu nyala</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Jaringan</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Jumlah hubungan</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Rantai blok</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Jumlah blok terkini</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Perkiraan blok total</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Waktu blok terakhir</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Buka</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the minty-Qt help message to get a list with possible minty command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Konsol</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Tanggal pembuatan</translation>
</message>
<message>
<location line="-104"/>
<source>minty - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>minty Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the minty debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Bersihkan konsol</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the minty RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Gunakan panah keatas dan kebawah untuk menampilkan sejarah, dan <b>Ctrl-L</b> untuk bersihkan layar.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Ketik <b>help</b> untuk menampilkan perintah tersedia.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Kirim Koin</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Jumlah:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 BSP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Kirim ke beberapa penerima sekaligus</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
        <translation>Hapus &Semua</translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 BSP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Konfirmasi aksi pengiriman</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a minty address (e.g. mintycoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Salin jumlah</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Konfirmasi pengiriman koin</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Jumlah yang dibayar harus lebih besar dari 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Jumlah melebihi saldo Anda.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
        <translation>Jumlah total melebihi saldo Anda ketika biaya transaksi %1 disertakan.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Ditemukan alamat ganda, hanya dapat mengirim ke tiap alamat sekali per operasi pengiriman.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid minty address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(tidak ada label)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>J&umlah:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Kirim &Ke:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Masukkan label bagi alamat ini untuk menambahkannya ke buku alamat Anda</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Label:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. mintycoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+J</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Tempel alamat dari salinan</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+B</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a minty address (e.g. mintycoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. mintycoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+J</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Tempel alamat dari salinan</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+B</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this minty address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
        <translation>Hapus &Semua</translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. mintycoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified minty address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a minty address (e.g. mintycoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter minty signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Alamat yang dimasukkan tidak sesuai.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Silahkan periksa alamat dan coba lagi.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Buka hingga %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/tidak terkonfirmasi</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 konfirmasi</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Tanggal</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Dari</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Untuk</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
        <translation>Pesan</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transaksi</translation>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Jumlah</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, belum berhasil disiarkan</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>tidak diketahui</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Rincian transaksi</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Jendela ini menampilkan deskripsi rinci dari transaksi tersebut</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Tanggal</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Jenis</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Jumlah</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Buka hingga %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Terkonfirmasi (%1 konfirmasi)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Blok ini tidak diterima oleh node lainnya dan kemungkinan tidak akan diterima!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Terbuat tetapi tidak diterima</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Diterima dengan</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Diterima dari</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Terkirim ke</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pembayaran ke Anda sendiri</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Tertambang</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(t/s)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status transaksi. Arahkan ke bagian ini untuk menampilkan jumlah konfirmasi.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Tanggal dan waktu transaksi tersebut diterima.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Jenis transaksi.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Alamat tujuan dari transaksi.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Jumlah yang dikurangi dari atau ditambahkan ke saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Semua</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hari ini</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Minggu ini</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Bulan ini</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Bulan kemarin</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Tahun ini</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Jarak...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Diterima dengan</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Terkirim ke</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Ke Anda sendiri</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Ditambang</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Lainnya</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Masukkan alamat atau label untuk mencari</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Jumlah min</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Salin alamat</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Salin label</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Salin jumlah</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Ubah label</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Tampilkan rincian transaksi</translation>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Berkas CSV (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Terkonfirmasi</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Tanggal</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Jenis</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Label</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Alamat</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Jumlah</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Jarak:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>ke</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>minty version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Penggunaan:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or mintyd</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Daftar perintah</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Dapatkan bantuan untuk perintah</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Pilihan:</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: minty.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: minty.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Tentukan direktori data</translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Atur ukuran tembolok dalam megabyte (standar: 25)</translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Mengatur hubungan paling banyak <n> ke peer (standar: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Hubungkan ke node untuk menerima alamat peer, dan putuskan</translation>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation>Tentukan alamat publik Anda sendiri</translation>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Batas untuk memutuskan peer buruk (standar: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Jumlah detik untuk mencegah peer buruk terhubung kembali (standar: 86400)</translation>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Menerima perintah baris perintah dan JSON-RPC</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing minty blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Berjalan di belakang sebagai daemon dan menerima perintah</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Gunakan jaringan uji</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong minty will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Kirim info lacak/debug ke konsol sebaliknya dari berkas debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Nama pengguna untuk hubungan JSON-RPC</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Kata sandi untuk hubungan JSON-RPC</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=versarpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "minty Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Izinkan hubungan JSON-RPC dari alamat IP yang ditentukan</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Kirim perintah ke node berjalan pada <ip> (standar: 127.0.0.1)</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Menjalankan perintah ketika perubahan blok terbaik (%s dalam cmd digantikan oleh hash blok)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation>Perbarui dompet ke format terbaru</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Atur ukuran kolam kunci menjadi <n> (standar: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Pindai ulang rantai-blok untuk transaksi dompet yang hilang</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Gunakan OpenSSL (https) untuk hubungan JSON-RPC</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Berkas sertifikat server (standar: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Kunci pribadi server (standar: server.pem)</translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Pesan bantuan ini</translation><|fim▁hole|> <translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. minty is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>minty</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Tidak dapat mengikat ke %s dengan komputer ini (ikatan gagal %d, %s)</translation>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Izinkan peninjauan DNS untuk -addnode, -seednode dan -connect</translation>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Memuat alamat...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Gagal memuat wallet.dat: Dompet rusak</translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of minty</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart minty to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Gagal memuat wallet.dat</translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Alamat -proxy salah: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Jaringan tidak diketahui yang ditentukan dalam -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Diminta versi proxy -socks tidak diketahui: %i</translation>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Tidak dapat menyelesaikan alamat -bind: '%s'</translation>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Tidak dapat menyelesaikan alamat -externalip: '%s'</translation>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Jumlah salah untuk -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Jumlah salah</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Saldo tidak mencukupi</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Memuat indeks blok...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Tambahkan node untuk dihubungkan dan upaya untuk menjaga hubungan tetap terbuka</translation>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. minty is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Memuat dompet...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation>Tidak dapat menurunkan versi dompet</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation>Tidak dapat menyimpan alamat standar</translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Memindai ulang...</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Memuat selesai</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation>Untuk menggunakan pilihan %s</translation>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Gagal</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Anda harus mengatur rpcpassword=<kata sandi> dalam berkas konfigurasi:
%s
Jika berkas tidak ada, buatlah dengan permisi berkas hanya-dapat-dibaca-oleh-pemilik.</translation>
</message>
</context>
</TS><|fim▁end|>
|
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
|
<|file_name|>test_mgmt_containerregistry_2017_03_01.py<|end_file_name|><|fim▁begin|># coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import unittest
from azure.mgmt.containerregistry.v2017_03_01.models import (<|fim▁hole|> SkuTier,
ProvisioningState,
PasswordName
)
import azure.mgmt.storage
from devtools_testutils import (
AzureMgmtTestCase, FakeStorageAccount,
ResourceGroupPreparer, StorageAccountPreparer
)
FAKE_STORAGE = FakeStorageAccount(
name='pyacr',
id=''
)
DEFAULT_LOCATION = 'westcentralus'
DEFAULT_SKU_NAME = 'Basic'
DEFAULT_KEY_VALUE_PAIR = {
'key': 'value'
}
class MgmtACRTest20170301(AzureMgmtTestCase):
def setUp(self):
super(MgmtACRTest20170301, self).setUp()
self.client = self.create_mgmt_client(
azure.mgmt.containerregistry.ContainerRegistryManagementClient,
api_version='2017-03-01'
)
@ResourceGroupPreparer(location=DEFAULT_LOCATION)
@StorageAccountPreparer(name_prefix='pyacr', location=DEFAULT_LOCATION, playback_fake_resource=FAKE_STORAGE)
def test_basic_registry(self, resource_group, location, storage_account, storage_account_key):
registry_name = self.get_resource_name('pyacr')
name_status = self.client.registries.check_name_availability(registry_name)
self.assertTrue(name_status.name_available)
# Create a Basic registry
registry = self.client.registries.create(
resource_group_name=resource_group.name,
registry_name=registry_name,
registry_create_parameters=RegistryCreateParameters(
location=location,
sku=Sku(
name=DEFAULT_SKU_NAME
),
storage_account=StorageAccountParameters(
name=storage_account.name,
access_key=storage_account_key
)
)
).result()
self.assertEqual(registry.name, registry_name)
self.assertEqual(registry.location, location)
self.assertEqual(registry.sku.name, DEFAULT_SKU_NAME)
self.assertEqual(registry.sku.tier, SkuTier.basic.value)
self.assertEqual(registry.provisioning_state.value, ProvisioningState.succeeded.value)
self.assertEqual(registry.admin_user_enabled, False)
registries = list(self.client.registries.list_by_resource_group(resource_group.name))
self.assertEqual(len(registries), 1)
# Update the registry with new tags and enable admin user
registry = self.client.registries.update(
resource_group_name=resource_group.name,
registry_name=registry_name,
registry_update_parameters=RegistryUpdateParameters(
tags=DEFAULT_KEY_VALUE_PAIR,
admin_user_enabled=True
)
)
self.assertEqual(registry.name, registry_name)
self.assertEqual(registry.tags, DEFAULT_KEY_VALUE_PAIR)
self.assertEqual(registry.admin_user_enabled, True)
registry = self.client.registries.get(resource_group.name, registry_name)
self.assertEqual(registry.name, registry_name)
self.assertEqual(registry.tags, DEFAULT_KEY_VALUE_PAIR)
self.assertEqual(registry.admin_user_enabled, True)
credentials = self.client.registries.list_credentials(resource_group.name, registry_name)
self.assertEqual(len(credentials.passwords), 2)
credentials = self.client.registries.regenerate_credential(
resource_group.name, registry_name, PasswordName.password)
self.assertEqual(len(credentials.passwords), 2)
self.client.registries.delete(resource_group.name, registry_name)
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
RegistryCreateParameters,
RegistryUpdateParameters,
StorageAccountParameters,
Sku,
|
<|file_name|>test_fibermatch.py<|end_file_name|><|fim▁begin|>import pytest
from megaradrp.processing.fibermatch import generate_box_model
from megaradrp.processing.fibermatch import count_peaks
PEAKS = [
3.806000000000000000e+03,
3.812000000000000000e+03,
3.818000000000000000e+03,
3.824000000000000000e+03,<|fim▁hole|> 3.842000000000000000e+03,
3.848000000000000000e+03,
3.854000000000000000e+03,
3.860000000000000000e+03,
3.867000000000000000e+03,
3.872000000000000000e+03,
3.878000000000000000e+03,
3.884000000000000000e+03,
3.890000000000000000e+03,
3.897000000000000000e+03,
3.903000000000000000e+03,
3.909000000000000000e+03,
3.915000000000000000e+03,
3.921000000000000000e+03
]
def test_generate_model():
expected = [
(1, 0),
(2, 0),
(3, 0),
(4, 0),
(5, 0)
]
model = generate_box_model(5, start=1)
assert len(model) == len(expected)
for m, e in zip(model, expected):
assert m == e
expected = [
(1, 0),
(2, 1),
(3, 0),
(4, 0),
(5, 0)
]
model = generate_box_model(5, missing_relids=[2])
assert len(model) == len(expected)
for m, e in zip(model, expected):
assert m == e
expected = [
(10, 0),
(12, 1),
(13, 0),
(14, 0),
(15, 0)
]
model = generate_box_model(5, start=10, skip_fibids=[11], missing_relids=[2])
assert len(model) == len(expected)
for m, e in zip(model, expected):
assert m == e
def test_count_peaks1():
with pytest.raises(ValueError):
count_peaks([])
def test_count_peaks():
expected = []
idx = 0
for p in PEAKS:
t = (idx + 1, p, 0, idx)
expected.append(t)
idx += 1
result = count_peaks(PEAKS, tol=1.2, distance=6.0)
assert result == expected<|fim▁end|>
|
3.830000000000000000e+03,
3.836000000000000000e+03,
|
<|file_name|>test_funcs.py<|end_file_name|><|fim▁begin|>import unittest
from libs.funcs import *
class TestFuncs(unittest.TestCase):
<|fim▁hole|>
recPaths, repPaths, rouPaths, corePaths = buildPaths()
findTxt = lambda x, y: x.find(y) > -1
assert findTxt(recPaths["Task"][0], "base")
assert findTxt(recPaths["Department"][0], "StdPy")
assert findTxt(recPaths["Department"][1], "standard")
assert findTxt(repPaths["ListWindowReport"][0], "base")
assert findTxt(repPaths["ExpensesList"][0], "StdPy")
assert findTxt(repPaths["ExpensesList"][1], "standard")
assert findTxt(rouPaths["GenNLT"][0], "StdPy")
assert findTxt(rouPaths["GenNLT"][1], "standard")
assert findTxt(corePaths["Field"][0], "embedded")
self.assertFalse([k for (k, v) in rouPaths.iteritems() if findTxt(v[0], "base")]) #no routines in base
def test_recordInheritance(self):
recf, recd = getRecordInheritance("Invoice")
assert all([f1 in recf for f1 in ("SalesMan", "InvoiceDate", "CustCode", "Currency", "ShiftDate", "OriginNr", "SerNr", "attachFlag")])
assert all([d in recd for d in ("CompoundItemCosts", "Payments", "Items", "Taxes", "Installs")])
recf, recd = getRecordInheritance("AccessGroup")
assert all([f2 in recf for f2 in ("PurchaseItemsAccessType", "InitialModule", "Closed", "internalId")])
assert all([d in recd for d in ("PurchaseItems", "Customs", "Modules")])
def test_recordsInfo(self):
recf, recd = getRecordsInfo("Department", RECORD)
assert recf["Department"]["AutoCashCancel"] == "integer" #From StdPy
assert recf["Department"]["DeptName"] == "string" #From standard
assert recf["Department"]["Closed"] == "Boolean" #From Master
assert recf["Department"]["internalId"] == "internalid" #From Record
assert recd["Department"]["OfficePayModes"] == "DepartmentOfficePayModeRow" #Recordname from detail
repf, repd = getRecordsInfo("Balance", REPORT)
assert repf["Balance"]["LabelType"] == "string" #StdPy
assert repf["Balance"]["ExplodeByLabel"] == "boolean" #Standard
assert repf["Balance"]["internalId"] == "internalid" #Record
assert not repd["Balance"] #Empty dict, no detail
rouf, roud = getRecordsInfo("GenNLT", ROUTINE)
assert rouf["GenNLT"]["ExcludeInvalid"] == "boolean"
assert rouf["GenNLT"]["Table"] == "string"
assert not roud["GenNLT"]
rouf, roud = getRecordsInfo("LoginDialog", RECORD)
assert rouf["LoginDialog"]["Password"] == "string" #embedded
assert not roud["LoginDialog"]
def test_classInfo(self):
attr, meth = getClassInfo("Invoice")
assert attr["DEBITNOTE"] == 2
assert attr["ATTACH_NOTE"] == 3
assert attr["rowNr"] == 0
assert attr["ParentInvoice"] == "SuperClass"
assert isinstance(attr["DocTypes"], list)
assert isinstance(attr["Origin"], dict)
assert all([m in meth for m in ("getCardReader", "logTransactionAction", "updateCredLimit",
"generateTaxes", "roundValue", "getOriginType", "bring", "getXML", "createField")])
assert meth["fieldIsEditable"][0] == "self"
assert meth["fieldIsEditable"][1] == "fieldname"
assert meth["fieldIsEditable"][2] == {"rowfieldname":'None'}
assert meth["fieldIsEditable"][3] == {"rownr":'None'}
attr, meth = getClassInfo("User")
assert attr["buffer"] == "RecordBuffer"
assert all([m in meth for m in ("store", "save", "load", "hasField")])
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TestFuncs))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')<|fim▁end|>
|
def test_buildPaths(self):
|
<|file_name|>svg-sample-icons.js<|end_file_name|><|fim▁begin|>/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
import"../iron-iconset-svg.js";import{html as c}from"../../polymer/lib/utils/html-tag.js";const t=c`<iron-iconset-svg name="svg-sample-icons" size="100">
<svg>
<defs>
<g id="codepen">
<path class="outer-ring" d="M50,0C22.385,0,0,22.385,0,50c0,27.615,22.385,50,50,50c27.614,0,50-22.385,50-50C100,22.385,77.615,0,50,0z M50,91.789
C26.958,91.789,8.212,73.042,8.212,50C8.212,26.958,26.958,8.212,50,8.212c23.042,0,41.788,18.747,41.788,41.789
C91.788,73.042,73.042,91.789,50,91.789z"></path>
<path class="inner-logo" d="M80.893,40.234c-0.006-0.039-0.016-0.076-0.022-0.115c-0.013-0.075-0.027-0.15-0.046-0.223
c-0.012-0.044-0.028-0.086-0.042-0.128c-0.021-0.065-0.042-0.13-0.068-0.193c-0.018-0.044-0.039-0.088-0.059-0.13
c-0.028-0.06-0.057-0.119-0.09-0.175c-0.024-0.042-0.051-0.083-0.076-0.124c-0.036-0.055-0.073-0.109-0.112-0.161
c-0.029-0.039-0.06-0.078-0.091-0.115c-0.042-0.049-0.086-0.098-0.132-0.143c-0.035-0.036-0.069-0.072-0.106-0.104
c-0.049-0.044-0.099-0.086-0.15-0.127c-0.04-0.031-0.079-0.062-0.12-0.091c-0.016-0.01-0.029-0.023-0.044-0.033L51.474,19.531
c-0.893-0.595-2.055-0.595-2.947,0L20.267,38.371c-0.015,0.01-0.028,0.023-0.044,0.033c-0.042,0.029-0.081,0.06-0.12,0.091
c-0.052,0.041-0.102,0.083-0.15,0.127c-0.037,0.032-0.071,0.068-0.106,0.104c-0.046,0.045-0.09,0.094-0.132,0.143
c-0.031,0.038-0.062,0.077-0.092,0.115c-0.039,0.052-0.076,0.106-0.111,0.161c-0.027,0.041-0.052,0.082-0.076,0.124
c-0.033,0.057-0.062,0.115-0.09,0.175c-0.021,0.042-0.042,0.086-0.06,0.13c-0.026,0.063-0.047,0.128-0.068,0.193
c-0.014,0.042-0.029,0.084-0.042,0.128c-0.02,0.073-0.032,0.148-0.046,0.223c-0.006,0.039-0.016,0.076-0.021,0.115
c-0.016,0.114-0.024,0.229-0.024,0.346V59.42c0,0.117,0.009,0.233,0.024,0.348c0.005,0.038,0.015,0.077,0.021,0.114
c0.014,0.075,0.027,0.149,0.046,0.223c0.012,0.043,0.028,0.086,0.042,0.128c0.021,0.065,0.042,0.13,0.068,0.195
c0.018,0.044,0.039,0.086,0.06,0.129c0.028,0.06,0.058,0.118,0.09,0.177c0.024,0.041,0.049,0.082,0.076,0.122
c0.035,0.056,0.072,0.109,0.111,0.161c0.029,0.041,0.061,0.078,0.092,0.115c0.042,0.049,0.086,0.098,0.132,0.144
c0.035,0.036,0.069,0.071,0.106,0.104c0.048,0.044,0.099,0.086,0.15,0.127c0.039,0.031,0.078,0.062,0.12,0.091
c0.016,0.01,0.029,0.023,0.044,0.032l28.259,18.84c0.446,0.297,0.96,0.447,1.474,0.447c0.513,0,1.027-0.149,1.473-0.447
l28.259-18.84c0.015-0.009,0.028-0.022,0.044-0.032c0.042-0.029,0.081-0.06,0.12-0.091c0.051-0.041,0.102-0.083,0.15-0.127
c0.037-0.033,0.071-0.068,0.106-0.104c0.046-0.046,0.09-0.095,0.132-0.144c0.031-0.037,0.062-0.075,0.091-0.115
c0.04-0.052,0.076-0.105,0.112-0.161c0.025-0.041,0.051-0.081,0.076-0.122c0.033-0.059,0.062-0.117,0.09-0.177
c0.02-0.042,0.041-0.085,0.059-0.129c0.026-0.065,0.047-0.13,0.068-0.195c0.014-0.042,0.03-0.085,0.042-0.128
c0.02-0.074,0.033-0.148,0.046-0.223c0.006-0.037,0.016-0.076,0.022-0.114c0.014-0.115,0.023-0.231,0.023-0.348V40.581
C80.916,40.464,80.907,40.348,80.893,40.234z M52.657,26.707l20.817,13.877l-9.298,6.221l-11.519-7.706V26.707z M47.343,26.707
v12.393l-11.518,7.706l-9.299-6.221L47.343,26.707z M24.398,45.554L31.046,50l-6.648,4.446V45.554z M47.343,73.294L26.525,59.417
l9.299-6.219l11.518,7.704V73.294z M50,56.286L40.603,50L50,43.715L59.397,50L50,56.286z M52.657,73.294V60.902l11.519-7.704
l9.298,6.219L52.657,73.294z M75.602,54.447L68.955,50l6.647-4.446V54.447z"></path>
</g>
<path id="twitter" d="M100.001,17.942c-3.681,1.688-7.633,2.826-11.783,3.339
c4.236-2.624,7.49-6.779,9.021-11.73c-3.965,2.432-8.354,4.193-13.026,5.146C80.47,10.575,75.138,8,69.234,8
c-11.33,0-20.518,9.494-20.518,21.205c0,1.662,0.183,3.281,0.533,4.833c-17.052-0.884-32.168-9.326-42.288-22.155
c-1.767,3.133-2.778,6.773-2.778,10.659c0,7.357,3.622,13.849,9.127,17.65c-3.363-0.109-6.525-1.064-9.293-2.651
c-0.002,0.089-0.002,0.178-0.002,0.268c0,10.272,7.072,18.845,16.458,20.793c-1.721,0.484-3.534,0.744-5.405,0.744
c-1.322,0-2.606-0.134-3.859-0.379c2.609,8.424,10.187,14.555,19.166,14.726c-7.021,5.688-15.867,9.077-25.48,9.077
c-1.656,0-3.289-0.102-4.895-0.297C9.08,88.491,19.865,92,31.449,92c37.737,0,58.374-32.312,58.374-60.336
c0-0.92-0.02-1.834-0.059-2.743C93.771,25.929,97.251,22.195,100.001,17.942L100.001,17.942z"></path>
<g id="youtube">
<path class="youtube" d="M98.77,27.492c-1.225-5.064-5.576-8.799-10.811-9.354C75.561,16.818,63.01,15.993,50.514,16
c-12.495-0.007-25.045,0.816-37.446,2.139c-5.235,0.557-9.583,4.289-10.806,9.354C0.522,34.704,0.5,42.574,0.5,50.001
c0,7.426,0,15.296,1.741,22.509c1.224,5.061,5.572,8.799,10.807,9.352c12.399,1.32,24.949,2.145,37.446,2.14
c12.494,0.005,25.047-0.817,37.443-2.14c5.234-0.555,9.586-4.291,10.81-9.352c1.741-7.213,1.753-15.083,1.753-22.509
S100.51,34.704,98.77,27.492 M67.549,52.203L43.977,64.391c-2.344,1.213-4.262,0.119-4.262-2.428V38.036
c0-2.548,1.917-3.644,4.262-2.429l23.572,12.188C69.896,49.008,69.896,50.992,67.549,52.203"></path><|fim▁hole|>
</defs>
</svg>
</iron-iconset-svg>`;document.head.appendChild(t.content);<|fim▁end|>
|
</g>
|
<|file_name|>shared.ts<|end_file_name|><|fim▁begin|>import { NgModule, ModuleWithProviders } from '@angular/core';<|fim▁hole|>import { CommonModule } from "@angular/common";
import { SharedComponent } from './src/components/shared.component';
import { SharedService } from './src/services/shared.service';
export * from './src/components/shared.component';
export * from './src/services/shared.service';
@NgModule({
imports: [CommonModule],
declarations: [SharedComponent],
exports: [SharedComponent],
providers: [],
})
export class SharedModule {
static forRoot(): ModuleWithProviders {
return {
ngModule: SharedModule,
providers: [SharedService]
}
}
}<|fim▁end|>
| |
<|file_name|>ng2-file-upload.component.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../../typings/tsd.d.ts" />
import {Component} from '@angular/core';
import { Http, Response, Headers, RequestOptions} from '@angular/http';
import {CORE_DIRECTIVES, FORM_DIRECTIVES, NgClass, NgStyle} from '@angular/common';
import {FILE_UPLOAD_DIRECTIVES, FileSelectDirective,FileDropDirective, FileUploader} from 'ng2-file-upload';
import { CONFIG } from '../../config';
import { ProcessMessageService } from '../../services/processmessage.service';
import { UserSession, UserIdentity, Authentication} from '../../helpers/classes';
let uploadsUrl = CONFIG.baseUrls.uploads;
let uploadsAttachUrl = CONFIG.baseUrls.uploadsattach;
@Component({
selector: 'ng2-file-upload',
templateUrl: './app/views/file-upload/ng2-file-upload.component.html',
directives: [FILE_UPLOAD_DIRECTIVES, NgClass, NgStyle, CORE_DIRECTIVES, FORM_DIRECTIVES]
<|fim▁hole|> private session: UserSession;
private identity: UserIdentity = new UserIdentity;
private isAuthenticated: boolean = false;
private isAllowed: boolean = false;
constructor(private _pmService: ProcessMessageService) {}
private uploader: FileUploader = new FileUploader({
url: uploadsUrl,
queueLimit: 5,
maxFileSize: 1024*1024,
});
private hasBaseDropZoneOver: boolean = true;
private hasAnotherDropZoneOver: boolean = false;
    // Upload a single queued item, without sending credentials/cookies.
    // NOTE(review): a literal "ArticleId" string is prepended to the file
    // name before upload -- presumably a placeholder for a real article id;
    // confirm the intended naming scheme with the upload endpoint.
    private uploadSingleFile(item: any) {
        item.withCredentials = false;
        item.file.name = "ArticleId" + item.file.name;
        item.upload();
    }
private fileOverBase(e: any) {
this.hasBaseDropZoneOver = e;
}
private fileOverAnother(e: any) {
this.hasAnotherDropZoneOver = e;
}
}<|fim▁end|>
|
})
export class NG2FileUploadComponent {
|
<|file_name|>games.js<|end_file_name|><|fim▁begin|>var express = require('express');
var router = express.Router();
// Play a game from the DB
router.get('/games', function (req, res) {<|fim▁hole|><|fim▁end|>
|
res.render('games', data);
});
module.exports = router;
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Types/fns concerning URLs (see RFC 3986)
#![crate_name = "url"]
#![deprecated="This is being removed. Use rust-url instead. http://servo.github.io/rust-url/"]
#![allow(deprecated)]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![license = "MIT/ASL2"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(default_type_params)]
use std::collections::HashMap;
use std::collections::hashmap::{Occupied, Vacant};
use std::fmt;
use std::from_str::FromStr;
use std::hash;
use std::uint;
use std::path::BytesContainer;
/// A Uniform Resource Locator (URL). A URL is a form of URI (Uniform Resource
/// Identifier) that includes network location information, such as hostname or
/// port number.
///
/// # Example
///
/// ```rust
/// # #![allow(deprecated)]
/// use url::Url;
///
/// let raw = "https://[email protected]:8080/foo/bar?baz=qux#quz";
/// match Url::parse(raw) {
/// Ok(u) => println!("Parsed '{}'", u),
/// Err(e) => println!("Couldn't parse '{}': {}", raw, e),
/// }
/// ```
// Derived equality compares every component, including query and fragment.
#[deriving(Clone, PartialEq, Eq)]
pub struct Url {
    /// The scheme part of a URL, such as `https` in the above example.
    pub scheme: String,
    /// A URL subcomponent for user authentication. `username` in the above example.
    pub user: Option<UserInfo>,
    /// A domain name or IP address. For example, `example.com`.
    pub host: String,
    /// A TCP port number, for example `8080`.
    pub port: Option<u16>,
    /// The path component of a URL, for example `/foo/bar?baz=qux#quz`.
    pub path: Path,
}
#[deriving(Clone, PartialEq, Eq)]
pub struct Path {
    /// The path component of a URL, for example `/foo/bar`.
    pub path: String,
    /// The query component of a URL.
    /// `vec![("baz".to_string(), "qux".to_string())]` represents the query
    /// string `baz=qux` in the above example.
    pub query: Query,
    /// The fragment component, such as `quz`. Not including the leading `#` character.
    pub fragment: Option<String>
}
/// An optional subcomponent of a URI authority component.
///
/// Rendered as `user@` or `user:pass@` by this type's `Show` impl.
#[deriving(Clone, PartialEq, Eq)]
pub struct UserInfo {
    /// The user name.
    pub user: String,
    /// Password or other scheme-specific authentication information.
    pub pass: Option<String>
}
/// Represents the query component of a URI.
pub type Query = Vec<(String, String)>;
impl Url {
    /// Creates a `Url` directly from its components; `path`, `query`
    /// and `fragment` are bundled into the contained `Path` value.
    /// No validation or encoding is performed.
    pub fn new(scheme: String,
               user: Option<UserInfo>,
               host: String,
               port: Option<u16>,
               path: String,
               query: Query,
               fragment: Option<String>)
               -> Url {
        Url {
            scheme: scheme,
            user: user,
            host: host,
            port: port,
            path: Path::new(path, query, fragment)
        }
    }

    /// Parses a URL, converting it from a string to a `Url` representation.
    ///
    /// # Arguments
    /// * rawurl - a string representing the full URL, including scheme.
    ///
    /// # Return value
    ///
    /// `Err(e)` if the string did not represent a valid URL, where `e` is a
    /// `String` error message. Otherwise, `Ok(u)` where `u` is a `Url` struct
    /// representing the URL.
    pub fn parse(rawurl: &str) -> DecodeResult<Url> {
        // Each helper consumes its component and returns the unparsed rest.
        // scheme
        let (scheme, rest) = try!(get_scheme(rawurl));
        // authority
        let (userinfo, host, port, rest) = try!(get_authority(rest));
        // path
        let has_authority = host.len() > 0;
        let (path, rest) = try!(get_path(rest, has_authority));
        // query and fragment
        let (query, fragment) = try!(get_query_fragment(rest));
        let url = Url::new(scheme.to_string(),
                           userinfo,
                           host.to_string(),
                           port,
                           path,
                           query,
                           fragment);
        Ok(url)
    }
}
#[deprecated="use `Url::parse`"]
/// Deprecated free-function wrapper kept for backwards compatibility;
/// equivalent to `Url::parse`.
pub fn from_str(s: &str) -> Result<Url, String> {
    Url::parse(s)
}
impl Path {
    /// Creates a `Path` directly from its three components.
    /// No validation or encoding is performed.
    pub fn new(path: String,
               query: Query,
               fragment: Option<String>)
               -> Path {
        Path {
            path: path,
            query: query,
            fragment: fragment,
        }
    }

    /// Parses a URL path, converting it from a string to a `Path` representation.
    ///
    /// # Arguments
    /// * rawpath - a string representing the path component of a URL.
    ///
    /// # Return value
    ///
    /// `Err(e)` if the string did not represent a valid URL path, where `e` is a
    /// `String` error message. Otherwise, `Ok(p)` where `p` is a `Path` struct
    /// representing the URL path.
    pub fn parse(rawpath: &str) -> DecodeResult<Path> {
        // `false`: no authority precedes a bare path, so a leading '/'
        // is not required here.
        let (path, rest) = try!(get_path(rawpath, false));
        // query and fragment
        let (query, fragment) = try!(get_query_fragment(rest.as_slice()));
        Ok(Path{ path: path, query: query, fragment: fragment })
    }
}
<|fim▁hole|>}
impl UserInfo {
    /// Creates a `UserInfo` from a user name and an optional password.
    #[inline]
    pub fn new(user: String, pass: Option<String>) -> UserInfo {
        UserInfo { user: user, pass: pass }
    }
}
// Percent-encodes the bytes of `c`. With `full_url == true`, URI structure
// characters (gen-delims / sub-delims) pass through unchanged; otherwise
// only the RFC 3986 "unreserved" set survives and everything else becomes
// a two-digit `%XX` escape.
fn encode_inner<T: BytesContainer>(c: T, full_url: bool) -> String {
    c.container_as_bytes().iter().fold(String::new(), |mut out, &b| {
        match b as char {
            // unreserved:
            'A' ... 'Z'
            | 'a' ... 'z'
            | '0' ... '9'
            | '-' | '.' | '_' | '~' => out.push_char(b as char),
            // gen-delims:
            ':' | '/' | '?' | '#' | '[' | ']' | '@' |
            // sub-delims:
            // NOTE(review): this set contains '"' where RFC 3986's
            // sub-delims have '\'' (apostrophe). decode_inner mirrors the
            // same set, so encode/decode stay symmetric with each other,
            // but this diverges from the RFC -- confirm before relying on
            // strict RFC 3986 behaviour.
            '!' | '$' | '&' | '"' | '(' | ')' | '*' |
            '+' | ',' | ';' | '='
            if full_url => out.push_char(b as char),
            ch => out.push_str(format!("%{:02X}", ch as uint).as_slice()),
        };
        out
    })
}
/// Encodes a URI by replacing reserved characters with percent-encoded
/// character sequences.
///
/// This function is compliant with RFC 3986.
///
/// Characters with structural meaning in a URI (`:`, `/`, `?`, `#`, `&`,
/// etc.) are left as-is; use `encode_component` to escape those as well.
///
/// # Example
///
/// ```rust
/// # #![allow(deprecated)]
/// use url::encode;
///
/// let url = encode("https://example.com/Rust (programming language)");
/// println!("{}", url); // https://example.com/Rust%20(programming%20language)
/// ```
pub fn encode<T: BytesContainer>(container: T) -> String {
    encode_inner(container, true)
}
/// Encodes a URI component by replacing reserved characters with percent-
/// encoded character sequences.
///
/// This function is compliant with RFC 3986.
///
/// Unlike `encode`, the URI structure characters (gen-delims and sub-delims)
/// are percent-encoded too, so the result is safe to embed as a single path
/// segment or query key/value.
pub fn encode_component<T: BytesContainer>(container: T) -> String {
    encode_inner(container, false)
}
pub type DecodeResult<T> = Result<T, String>;
/// Decodes a percent-encoded string representing a URI.
///
/// This will only decode escape sequences generated by `encode`.
///
/// Returns `Err` when a `%` is not followed by two more bytes.
///
/// # Example
///
/// ```rust
/// # #![allow(deprecated)]
/// use url::decode;
///
/// let url = decode("https://example.com/Rust%20(programming%20language)");
/// println!("{}", url); // https://example.com/Rust (programming language)
/// ```
pub fn decode<T: BytesContainer>(container: T) -> DecodeResult<String> {
    decode_inner(container, true)
}
/// Decode a string encoded with percent encoding.
///
/// Unlike `decode`, every `%XX` escape is expanded, including those that
/// encode URI structure characters.
pub fn decode_component<T: BytesContainer>(container: T) -> DecodeResult<String> {
    decode_inner(container, false)
}
// Expands `%XX` escapes in the bytes of `c`. With `full_url == true`,
// escapes that denote URI structure characters are left *encoded* (the
// literal "%XX" text is copied through) so the URI's structure survives;
// otherwise every escape is decoded. Errors when '%' lacks two bytes.
fn decode_inner<T: BytesContainer>(c: T, full_url: bool) -> DecodeResult<String> {
    let mut out = String::new();
    let mut iter = c.container_as_bytes().iter().map(|&b| b);
    loop {
        match iter.next() {
            Some(b) => match b as char {
                '%' => {
                    let bytes = match (iter.next(), iter.next()) {
                        (Some(one), Some(two)) => [one as u8, two as u8],
                        _ => return Err(format!("Malformed input: found '%' \
                                                without two trailing bytes")),
                    };
                    // Only decode some characters if full_url:
                    match uint::parse_bytes(bytes, 16u).unwrap() as u8 as char {
                        // gen-delims:
                        ':' | '/' | '?' | '#' | '[' | ']' | '@' |
                        // sub-delims:
                        // NOTE(review): '"' here mirrors encode_inner's set,
                        // which diverges from RFC 3986's '\'' -- see the note
                        // on encode_inner.
                        '!' | '$' | '&' | '"' | '(' | ')' | '*' |
                        '+' | ',' | ';' | '='
                        if full_url => {
                            out.push_char('%');
                            out.push_char(bytes[0u] as char);
                            out.push_char(bytes[1u] as char);
                        }
                        ch => out.push_char(ch)
                    }
                }
                ch => out.push_char(ch)
            },
            None => return Ok(out),
        }
    }
}
/// Encode a hashmap to the 'application/x-www-form-urlencoded' media type.
///
/// Alphanumerics plus `_`, `.` and `-` are kept literal, a space becomes
/// `+`, and every other byte becomes a two-digit `%XX` escape. Each value
/// of a key produces one `key=value` pair; pairs are joined with `&`.
pub fn encode_form_urlencoded(m: &HashMap<String, Vec<String>>) -> String {
    // Percent-escape a single key or value.
    fn encode_plus<T: Str>(s: &T) -> String {
        s.as_slice().bytes().fold(String::new(), |mut out, b| {
            match b as char {
              'A' ... 'Z'
              | 'a' ... 'z'
              | '0' ... '9'
              | '_' | '.' | '-' => out.push_char(b as char),
              ' ' => out.push_char('+'),
              // Zero-pad to two hex digits: decode_form_urlencoded (and any
              // compliant decoder) consumes exactly two bytes after '%', so
              // bytes < 0x10 must render as e.g. "%09", never "%9".
              ch => out.push_str(format!("%{:02X}", ch as uint).as_slice())
            }
            out
        })
    }

    let mut first = true;
    m.iter().fold(String::new(), |mut out, (key, values)| {
        let key = encode_plus(key);

        for value in values.iter() {
            // '&' between pairs, but not before the very first one.
            if first {
                first = false;
            } else {
                out.push_char('&');
            }

            out.push_str(key.as_slice());
            out.push_char('=');
            out.push_str(encode_plus(value).as_slice());
        }

        out
    })
}
/// Decode a string encoded with the 'application/x-www-form-urlencoded' media
/// type into a hashmap.
///
/// Both `&` and `;` separate pairs, `+` decodes to a space and `%XX`
/// escapes are expanded. Pairs with an empty key or an empty value are
/// silently dropped. Repeated keys accumulate into the value vector.
pub fn decode_form_urlencoded(s: &[u8])
    -> DecodeResult<HashMap<String, Vec<String>>> {
    // Insert `value` under `key`, appending when the key already exists.
    // Empty keys or values are discarded (see the function docs).
    fn maybe_push_value(map: &mut HashMap<String, Vec<String>>,
                        key: String,
                        value: String) {
        if key.len() > 0 && value.len() > 0 {
            match map.entry(key) {
                Vacant(entry) => { entry.set(vec![value]); },
                Occupied(mut entry) => { entry.get_mut().push(value); },
            }
        }
    }

    // Simple two-phase scanner: accumulate into `key` until '=', then into
    // `value` until a pair separator flushes the pair.
    let mut out = HashMap::new();
    let mut iter = s.iter().map(|&x| x);

    let mut key = String::new();
    let mut value = String::new();
    let mut parsing_key = true;

    loop {
        match iter.next() {
            Some(b) => match b as char {
                '&' | ';' => {
                    maybe_push_value(&mut out, key, value);

                    parsing_key = true;
                    key = String::new();
                    value = String::new();
                }
                '=' => parsing_key = false,
                ch => {
                    let ch = match ch {
                        '%' => {
                            let bytes = match (iter.next(), iter.next()) {
                                (Some(one), Some(two)) => [one as u8, two as u8],
                                _ => return Err(format!("Malformed input: found \
                                                        '%' without two trailing bytes"))
                            };

                            uint::parse_bytes(bytes, 16u).unwrap() as u8 as char
                        }
                        '+' => ' ',
                        ch => ch
                    };

                    if parsing_key {
                        key.push_char(ch)
                    } else {
                        value.push_char(ch)
                    }
                }
            },
            None => {
                // Flush the trailing pair (input need not end with '&'/';').
                maybe_push_value(&mut out, key, value);

                return Ok(out)
            }
        }
    }
}
fn split_char_first(s: &str, c: char) -> (&str, &str) {
let mut iter = s.splitn(1, c);
match (iter.next(), iter.next()) {
(Some(a), Some(b)) => (a, b),
(Some(a), None) => (a, ""),
(None, _) => unreachable!(),
}
}
impl fmt::Show for UserInfo {
    /// Formats as `user@` or `user:password@`, ready to prefix a host.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.pass {
            Some(ref pass) => write!(f, "{}:{}@", self.user, *pass),
            None => write!(f, "{}@", self.user),
        }
    }
}
// Parses a raw query string (without the leading '?') into key/value
// pairs: split on '&', then on the first '=' of each pair; both halves
// are percent-decoded with decode_component. A pair without '=' yields
// an empty value.
fn query_from_str(rawquery: &str) -> DecodeResult<Query> {
    let mut query: Query = vec!();
    if !rawquery.is_empty() {
        for p in rawquery.split('&') {
            let (k, v) = split_char_first(p, '=');
            query.push((try!(decode_component(k)),
                        try!(decode_component(v))));
        }
    }

    Ok(query)
}
/// Converts an instance of a URI `Query` type to a string.
///
/// Keys and values are escaped with `encode_component`; pairs are joined
/// with `&`. The inverse of `query_from_str`.
///
/// # Example
///
/// ```rust
/// # #![allow(deprecated)]
/// let query = vec![("title".to_string(), "The Village".to_string()),
///                  ("north".to_string(), "52.91".to_string()),
///                  ("west".to_string(), "4.10".to_string())];
/// println!("{}", url::query_to_str(&query)); // title=The%20Village&north=52.91&west=4.10
/// ```
pub fn query_to_str(query: &Query) -> String {
    query.iter().enumerate().fold(String::new(), |mut out, (i, &(ref k, ref v))| {
        // '&' before every pair except the first.
        if i != 0 {
            out.push_char('&');
        }
        out.push_str(encode_component(k.as_slice()).as_slice());
        out.push_char('=');
        out.push_str(encode_component(v.as_slice()).as_slice());
        out
    })
}
/// Returns a tuple of the URI scheme and the rest of the URI, or a parsing error.
///
/// Does not include the separating `:` character.
///
/// # Example
///
/// ```rust
/// # #![allow(deprecated)]
/// use url::get_scheme;
///
/// let scheme = match get_scheme("https://example.com/") {
///     Ok((sch, _)) => sch,
///     Err(_) => "(None)",
/// };
/// println!("Scheme in use: {}.", scheme); // Scheme in use: https.
/// ```
pub fn get_scheme(rawurl: &str) -> DecodeResult<(&str, &str)> {
    // Scheme grammar: ALPHA *( ALPHA / DIGIT / "+" / "-" / "." ), ended
    // by ':'. The loop returns on the first character that decides the
    // outcome; falling off the end means no ':' was ever seen.
    for (i,c) in rawurl.chars().enumerate() {
        let result = match c {
            'A' ... 'Z'
            | 'a' ... 'z' => continue,
            '0' ... '9' | '+' | '-' | '.' => {
                // Digits and +-. are legal anywhere except position 0.
                if i != 0 { continue }

                Err("url: Scheme must begin with a letter.".to_string())
            }
            ':' => {
                if i == 0 {
                    Err("url: Scheme cannot be empty.".to_string())
                } else {
                    Ok((rawurl.slice(0,i), rawurl.slice(i+1,rawurl.len())))
                }
            }
            _ => Err("url: Invalid character in scheme.".to_string()),
        };

        return result;
    }

    Err("url: Scheme must be terminated with a colon.".to_string())
}
// returns userinfo, host, port, and unparsed part, or an error
//
// Scans a "//user:pass@host:port" prefix with a small state machine:
// `State` records what the span since `begin` could still turn out to be,
// and `Input` records the widest character class seen since the last
// separator (Digit, then Hex, then Unreserved), which is what lets
// "port vs. IPv6 host" be decided lazily at each ':'.
fn get_authority(rawurl: &str) ->
    DecodeResult<(Option<UserInfo>, &str, Option<u16>, &str)> {
    enum State {
        Start, // starting state
        PassHostPort, // could be in user or port
        Ip6Port, // either in ipv6 host or port
        Ip6Host, // are in an ipv6 host
        InHost, // are in a host - may be ipv6, but don't know yet
        InPort // are in port
    }

    #[deriving(Clone, PartialEq)]
    enum Input {
        Digit, // all digits
        Hex, // digits and letters a-f
        Unreserved // all other legal characters
    }

    if !rawurl.starts_with("//") {
        // there is no authority.
        return Ok((None, "", None, rawurl));
    }

    let len = rawurl.len();
    let mut st = Start;
    let mut input = Digit; // most restricted, start here.

    let mut userinfo = None;
    let mut host = "";
    let mut port = None;

    let mut colon_count = 0u;
    let mut pos = 0;
    let mut begin = 2;
    let mut end = len;

    for (i,c) in rawurl.chars().enumerate()
                       // ignore the leading '//' handled by early return
                       .skip(2) {
        // deal with input class first
        match c {
            '0' ... '9' => (),
            'A' ... 'F'
            | 'a' ... 'f' => {
                if input == Digit {
                    input = Hex;
                }
            }
            'G' ... 'Z'
            | 'g' ... 'z'
            | '-' | '.' | '_' | '~' | '%'
            | '&' |'\'' | '(' | ')' | '+'
            | '!' | '*' | ',' | ';' | '=' => input = Unreserved,
            ':' | '@' | '?' | '#' | '/' => {
                // separators, don't change anything
            }
            _ => return Err("Illegal character in authority".to_string()),
        }

        // now process states
        match c {
            ':' => {
                colon_count += 1;
                match st {
                    Start => {
                        // Could be "user:pass@..." or "host:port".
                        pos = i;
                        st = PassHostPort;
                    }
                    PassHostPort => {
                        // multiple colons means ipv6 address.
                        if input == Unreserved {
                            return Err(
                                "Illegal characters in IPv6 address.".to_string());
                        }
                        st = Ip6Host;
                    }
                    InHost => {
                        pos = i;
                        if input == Unreserved {
                            // must be port
                            host = rawurl.slice(begin, i);
                            st = InPort;
                        } else {
                            // can't be sure whether this is an ipv6 address or a port
                            st = Ip6Port;
                        }
                    }
                    Ip6Port => {
                        if input == Unreserved {
                            return Err("Illegal characters in authority.".to_string());
                        }
                        st = Ip6Host;
                    }
                    Ip6Host => {
                        // An IPv6 address has at most 8 groups; a further
                        // colon must start the port.
                        if colon_count > 7 {
                            host = rawurl.slice(begin, i);
                            pos = i;
                            st = InPort;
                        }
                    }
                    _ => return Err("Invalid ':' in authority.".to_string()),
                }
                input = Digit; // reset input class
            }

            '@' => {
                input = Digit; // reset input class
                colon_count = 0; // reset count
                match st {
                    Start => {
                        // Everything so far was the user name (no password).
                        let user = rawurl.slice(begin, i).to_string();
                        userinfo = Some(UserInfo::new(user, None));
                        st = InHost;
                    }
                    PassHostPort => {
                        // The earlier ':' separated user from password.
                        let user = rawurl.slice(begin, pos).to_string();
                        let pass = rawurl.slice(pos+1, i).to_string();
                        userinfo = Some(UserInfo::new(user, Some(pass)));
                        st = InHost;
                    }
                    _ => return Err("Invalid '@' in authority.".to_string()),
                }
                begin = i+1;
            }

            '?' | '#' | '/' => {
                // Authority ends at the first path/query/fragment delimiter.
                end = i;
                break;
            }
            _ => ()
        }
    }

    // finish up: resolve whatever state the scan ended in.
    match st {
        Start => host = rawurl.slice(begin, end),
        PassHostPort
        | Ip6Port => {
            if input != Digit {
                return Err("Non-digit characters in port.".to_string());
            }
            host = rawurl.slice(begin, pos);
            port = Some(rawurl.slice(pos+1, end));
        }
        Ip6Host
        | InHost => host = rawurl.slice(begin, end),
        InPort => {
            if input != Digit {
                return Err("Non-digit characters in port.".to_string());
            }
            port = Some(rawurl.slice(pos+1, end));
        }
    }

    let rest = rawurl.slice(end, len);
    // If we have a port string, ensure it parses to u16.
    let port = match port {
        None => None,
        opt => match opt.and_then(|p| FromStr::from_str(p)) {
            None => return Err(format!("Failed to parse port: {}", port)),
            opt => opt
        }
    };

    Ok((userinfo, host, port, rest))
}
// returns the path and unparsed part of url, or an error
//
// Accepts path characters until the first '?' or '#'; the matched prefix
// is percent-decoded. When an authority preceded the path
// (`is_authority`), a non-empty path must begin with '/'.
fn get_path(rawurl: &str, is_authority: bool) -> DecodeResult<(String, &str)> {
    let len = rawurl.len();
    let mut end = len;
    for (i,c) in rawurl.chars().enumerate() {
        match c {
            'A' ... 'Z'
            | 'a' ... 'z'
            | '0' ... '9'
            | '&' |'\'' | '(' | ')' | '.'
            | '@' | ':' | '%' | '/' | '+'
            | '!' | '*' | ',' | ';' | '='
            | '_' | '-' | '~' => continue,
            '?' | '#' => {
                // Path ends where the query or fragment begins.
                end = i;
                break;
            }
            _ => return Err("Invalid character in path.".to_string())
        }
    }

    if is_authority && end != 0 && !rawurl.starts_with("/") {
        Err("Non-empty path must begin with \
            '/' in presence of authority.".to_string())
    } else {
        Ok((try!(decode_component(rawurl.slice(0, end))),
            rawurl.slice(end, len)))
    }
}
// returns the parsed query and the fragment, if present
fn get_query_fragment(rawurl: &str) -> DecodeResult<(Query, Option<String>)> {
let (before_fragment, raw_fragment) = split_char_first(rawurl, '#');
// Parse the fragment if available
let fragment = match raw_fragment {
"" => None,
raw => Some(try!(decode_component(raw)))
};
match before_fragment.slice_shift_char() {
(Some('?'), rest) => Ok((try!(query_from_str(rest)), fragment)),
(None, "") => Ok((vec!(), fragment)),
_ => Err(format!("Query didn't start with '?': '{}..'", before_fragment)),
}
}
impl FromStr for Url {
fn from_str(s: &str) -> Option<Url> {
Url::parse(s).ok()
}
}
impl FromStr for Path {
fn from_str(s: &str) -> Option<Path> {
Path::parse(s).ok()
}
}
impl fmt::Show for Url {
/// Converts a URL from `Url` to string representation.
///
/// # Returns
///
/// A string that contains the formatted URL. Note that this will usually
/// be an inverse of `from_str` but might strip out unneeded separators;
/// for example, "http://somehost.com?", when parsed and formatted, will
/// result in just "http://somehost.com".
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "{}:", self.scheme));
if !self.host.is_empty() {
try!(write!(f, "//"));
match self.user {
Some(ref user) => try!(write!(f, "{}", *user)),
None => {}
}
match self.port {
Some(ref port) => try!(write!(f, "{}:{}", self.host,
*port)),
None => try!(write!(f, "{}", self.host)),
}
}
write!(f, "{}", self.path)
}
}
impl fmt::Show for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "{}", self.path));
if !self.query.is_empty() {
try!(write!(f, "?{}", query_to_str(&self.query)))
}
match self.fragment {
Some(ref fragment) => {
write!(f, "#{}", encode_component(fragment.as_slice()))
}
None => Ok(())
}
}
}
impl<S: hash::Writer> hash::Hash<S> for Url {
fn hash(&self, state: &mut S) {
self.to_string().hash(state)
}
}
impl<S: hash::Writer> hash::Hash<S> for Path {
fn hash(&self, state: &mut S) {
self.to_string().hash(state)
}
}
// Put a few tests outside of the 'test' module so they can test the internal
// functions and those functions don't need 'pub'
#[test]
fn test_split_char_first() {
let (u,v) = split_char_first("hello, sweet world", ',');
assert_eq!(u, "hello");
assert_eq!(v, " sweet world");
let (u,v) = split_char_first("hello sweet world", ',');
assert_eq!(u, "hello sweet world");
assert_eq!(v, "");
}
#[test]
fn test_get_authority() {
let (u, h, p, r) = get_authority(
"//user:[email protected]/something").unwrap();
assert_eq!(u, Some(UserInfo::new("user".to_string(), Some("pass".to_string()))));
assert_eq!(h, "rust-lang.org");
assert!(p.is_none());
assert_eq!(r, "/something");
let (u, h, p, r) = get_authority(
"//rust-lang.org:8000?something").unwrap();
assert!(u.is_none());
assert_eq!(h, "rust-lang.org");
assert_eq!(p, Some(8000));
assert_eq!(r, "?something");
let (u, h, p, r) = get_authority("//rust-lang.org#blah").unwrap();
assert!(u.is_none());
assert_eq!(h, "rust-lang.org");
assert!(p.is_none());
assert_eq!(r, "#blah");
// ipv6 tests
let (_, h, _, _) = get_authority(
"//2001:0db8:85a3:0042:0000:8a2e:0370:7334#blah").unwrap();
assert_eq!(h, "2001:0db8:85a3:0042:0000:8a2e:0370:7334");
let (_, h, p, _) = get_authority(
"//2001:0db8:85a3:0042:0000:8a2e:0370:7334:8000#blah").unwrap();
assert_eq!(h, "2001:0db8:85a3:0042:0000:8a2e:0370:7334");
assert_eq!(p, Some(8000));
let (u, h, p, _) = get_authority(
"//us:p@2001:0db8:85a3:0042:0000:8a2e:0370:7334:8000#blah"
).unwrap();
assert_eq!(u, Some(UserInfo::new("us".to_string(), Some("p".to_string()))));
assert_eq!(h, "2001:0db8:85a3:0042:0000:8a2e:0370:7334");
assert_eq!(p, Some(8000));
// invalid authorities;
assert!(get_authority("//user:pass@rust-lang:something").is_err());
assert!(get_authority("//user@rust-lang:something:/path").is_err());
assert!(get_authority(
"//2001:0db8:85a3:0042:0000:8a2e:0370:7334:800a").is_err());
assert!(get_authority(
"//2001:0db8:85a3:0042:0000:8a2e:0370:7334:8000:00").is_err());
// outside u16 range
assert!(get_authority("//user:pass@rust-lang:65536").is_err());
// these parse as empty, because they don't start with '//'
let (_, h, _, _) = get_authority("user:pass@rust-lang").unwrap();
assert_eq!(h, "");
let (_, h, _, _) = get_authority("rust-lang.org").unwrap();
assert_eq!(h, "");
}
#[test]
fn test_get_path() {
let (p, r) = get_path("/something+%20orother", true).unwrap();
assert_eq!(p, "/something+ orother".to_string());
assert_eq!(r, "");
let (p, r) = get_path("[email protected]#fragment", false).unwrap();
assert_eq!(p, "[email protected]".to_string());
assert_eq!(r, "#fragment");
let (p, r) = get_path("/gen/:addr=?q=v", false).unwrap();
assert_eq!(p, "/gen/:addr=".to_string());
assert_eq!(r, "?q=v");
//failure cases
assert!(get_path("something?q", true).is_err());
}
#[cfg(test)]
mod tests {
use {encode_form_urlencoded, decode_form_urlencoded, decode, encode,
encode_component, decode_component, UserInfo, get_scheme, Url, Path};
use std::collections::HashMap;
use std::path::BytesContainer;
#[test]
fn test_url_parse() {
let url = "http://user:[email protected]:8080/doc/~u?s=v#something";
let u = from_str::<Url>(url).unwrap();
assert_eq!(u.scheme, "http".to_string());
assert_eq!(u.user, Some(UserInfo::new("user".to_string(), Some("pass".to_string()))));
assert_eq!(u.host, "rust-lang.org".to_string());
assert_eq!(u.port, Some(8080));
assert_eq!(u.path.path, "/doc/~u".to_string());
assert_eq!(u.path.query, vec!(("s".to_string(), "v".to_string())));
assert_eq!(u.path.fragment, Some("something".to_string()));
}
#[test]
fn test_path_parse() {
let path = "/doc/~u?s=v#something";
let u = from_str::<Path>(path).unwrap();
assert_eq!(u.path, "/doc/~u".to_string());
assert_eq!(u.query, vec!(("s".to_string(), "v".to_string())));
assert_eq!(u.fragment, Some("something".to_string()));
}
#[test]
fn test_url_parse_host_slash() {
let urlstr = "http://0.42.42.42/";
let url = from_str::<Url>(urlstr).unwrap();
assert_eq!(url.host, "0.42.42.42".to_string());
assert_eq!(url.path.path, "/".to_string());
}
#[test]
fn test_path_parse_host_slash() {
let pathstr = "/";
let path = from_str::<Path>(pathstr).unwrap();
assert_eq!(path.path, "/".to_string());
}
#[test]
fn test_url_host_with_port() {
let urlstr = "scheme://host:1234";
let url = from_str::<Url>(urlstr).unwrap();
assert_eq!(url.scheme, "scheme".to_string());
assert_eq!(url.host, "host".to_string());
assert_eq!(url.port, Some(1234));
// is empty path really correct? Other tests think so
assert_eq!(url.path.path, "".to_string());
let urlstr = "scheme://host:1234/";
let url = from_str::<Url>(urlstr).unwrap();
assert_eq!(url.scheme, "scheme".to_string());
assert_eq!(url.host, "host".to_string());
assert_eq!(url.port, Some(1234));
assert_eq!(url.path.path, "/".to_string());
}
#[test]
fn test_url_with_underscores() {
let urlstr = "http://dotcom.com/file_name.html";
let url = from_str::<Url>(urlstr).unwrap();
assert_eq!(url.path.path, "/file_name.html".to_string());
}
#[test]
fn test_path_with_underscores() {
let pathstr = "/file_name.html";
let path = from_str::<Path>(pathstr).unwrap();
assert_eq!(path.path, "/file_name.html".to_string());
}
#[test]
fn test_url_with_dashes() {
let urlstr = "http://dotcom.com/file-name.html";
let url = from_str::<Url>(urlstr).unwrap();
assert_eq!(url.path.path, "/file-name.html".to_string());
}
#[test]
fn test_path_with_dashes() {
let pathstr = "/file-name.html";
let path = from_str::<Path>(pathstr).unwrap();
assert_eq!(path.path, "/file-name.html".to_string());
}
#[test]
fn test_no_scheme() {
assert!(get_scheme("noschemehere.html").is_err());
}
#[test]
fn test_invalid_scheme_errors() {
assert!(Url::parse("99://something").is_err());
assert!(Url::parse("://something").is_err());
}
#[test]
fn test_full_url_parse_and_format() {
let url = "http://user:[email protected]/doc?s=v#something";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_userless_url_parse_and_format() {
let url = "http://rust-lang.org/doc?s=v#something";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_queryless_url_parse_and_format() {
let url = "http://user:[email protected]/doc#something";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_empty_query_url_parse_and_format() {
let url = "http://user:[email protected]/doc?#something";
let should_be = "http://user:[email protected]/doc#something";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), should_be);
}
#[test]
fn test_fragmentless_url_parse_and_format() {
let url = "http://user:[email protected]/doc?q=v";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_minimal_url_parse_and_format() {
let url = "http://rust-lang.org/doc";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_url_with_port_parse_and_format() {
let url = "http://rust-lang.org:80/doc";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_scheme_host_only_url_parse_and_format() {
let url = "http://rust-lang.org";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_pathless_url_parse_and_format() {
let url = "http://user:[email protected]?q=v#something";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_scheme_host_fragment_only_url_parse_and_format() {
let url = "http://rust-lang.org#something";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_url_component_encoding() {
let url = "http://rust-lang.org/doc%20uments?ba%25d%20=%23%26%2B";
let u = from_str::<Url>(url).unwrap();
assert!(u.path.path == "/doc uments".to_string());
assert!(u.path.query == vec!(("ba%d ".to_string(), "#&+".to_string())));
}
#[test]
fn test_path_component_encoding() {
let path = "/doc%20uments?ba%25d%20=%23%26%2B";
let p = from_str::<Path>(path).unwrap();
assert!(p.path == "/doc uments".to_string());
assert!(p.query == vec!(("ba%d ".to_string(), "#&+".to_string())));
}
#[test]
fn test_url_without_authority() {
let url = "mailto:[email protected]";
let u = from_str::<Url>(url).unwrap();
assert_eq!(format!("{}", u).as_slice(), url);
}
#[test]
fn test_encode() {
fn t<T: BytesContainer>(input: T, expected: &str) {
assert_eq!(encode(input), expected.to_string())
}
t("", "");
t("http://example.com", "http://example.com");
t("foo bar% baz", "foo%20bar%25%20baz");
t(" ", "%20");
t("!", "!");
t("\"", "\"");
t("#", "#");
t("$", "$");
t("%", "%25");
t("&", "&");
t("'", "%27");
t("(", "(");
t(")", ")");
t("*", "*");
t("+", "+");
t(",", ",");
t("/", "/");
t(":", ":");
t(";", ";");
t("=", "=");
t("?", "?");
t("@", "@");
t("[", "[");
t("]", "]");
t("\0", "%00");
t("\n", "%0A");
let a: &[_] = &[0u8, 10, 37];
t(a, "%00%0A%25");
}
#[test]
fn test_encode_component() {
fn t<T: BytesContainer>(input: T, expected: &str) {
assert_eq!(encode_component(input), expected.to_string())
}
t("", "");
t("http://example.com", "http%3A%2F%2Fexample.com");
t("foo bar% baz", "foo%20bar%25%20baz");
t(" ", "%20");
t("!", "%21");
t("#", "%23");
t("$", "%24");
t("%", "%25");
t("&", "%26");
t("'", "%27");
t("(", "%28");
t(")", "%29");
t("*", "%2A");
t("+", "%2B");
t(",", "%2C");
t("/", "%2F");
t(":", "%3A");
t(";", "%3B");
t("=", "%3D");
t("?", "%3F");
t("@", "%40");
t("[", "%5B");
t("]", "%5D");
t("\0", "%00");
t("\n", "%0A");
let a: &[_] = &[0u8, 10, 37];
t(a, "%00%0A%25");
}
#[test]
fn test_decode() {
fn t<T: BytesContainer>(input: T, expected: &str) {
assert_eq!(decode(input), Ok(expected.to_string()))
}
assert!(decode("sadsadsda%").is_err());
assert!(decode("waeasd%4").is_err());
t("", "");
t("abc/def 123", "abc/def 123");
t("abc%2Fdef%20123", "abc%2Fdef 123");
t("%20", " ");
t("%21", "%21");
t("%22", "%22");
t("%23", "%23");
t("%24", "%24");
t("%25", "%");
t("%26", "%26");
t("%27", "'");
t("%28", "%28");
t("%29", "%29");
t("%2A", "%2A");
t("%2B", "%2B");
t("%2C", "%2C");
t("%2F", "%2F");
t("%3A", "%3A");
t("%3B", "%3B");
t("%3D", "%3D");
t("%3F", "%3F");
t("%40", "%40");
t("%5B", "%5B");
t("%5D", "%5D");
t("%00%0A%25".as_bytes(), "\0\n%");
}
#[test]
fn test_decode_component() {
fn t<T: BytesContainer>(input: T, expected: &str) {
assert_eq!(decode_component(input), Ok(expected.to_string()))
}
assert!(decode_component("asacsa%").is_err());
assert!(decode_component("acsas%4").is_err());
t("", "");
t("abc/def 123", "abc/def 123");
t("abc%2Fdef%20123", "abc/def 123");
t("%20", " ");
t("%21", "!");
t("%22", "\"");
t("%23", "#");
t("%24", "$");
t("%25", "%");
t("%26", "&");
t("%27", "'");
t("%28", "(");
t("%29", ")");
t("%2A", "*");
t("%2B", "+");
t("%2C", ",");
t("%2F", "/");
t("%3A", ":");
t("%3B", ";");
t("%3D", "=");
t("%3F", "?");
t("%40", "@");
t("%5B", "[");
t("%5D", "]");
t("%00%0A%25".as_bytes(), "\0\n%");
}
#[test]
fn test_encode_form_urlencoded() {
let mut m = HashMap::new();
assert_eq!(encode_form_urlencoded(&m), "".to_string());
m.insert("".to_string(), vec!());
m.insert("foo".to_string(), vec!());
assert_eq!(encode_form_urlencoded(&m), "".to_string());
let mut m = HashMap::new();
m.insert("foo".to_string(), vec!("bar".to_string(), "123".to_string()));
assert_eq!(encode_form_urlencoded(&m), "foo=bar&foo=123".to_string());
let mut m = HashMap::new();
m.insert("foo bar".to_string(), vec!("abc".to_string(), "12 = 34".to_string()));
assert_eq!(encode_form_urlencoded(&m),
"foo+bar=abc&foo+bar=12+%3D+34".to_string());
}
#[test]
fn test_decode_form_urlencoded() {
assert_eq!(decode_form_urlencoded([]).unwrap().len(), 0);
let s = "a=1&foo+bar=abc&foo+bar=12+%3D+34".as_bytes();
let form = decode_form_urlencoded(s).unwrap();
assert_eq!(form.len(), 2);
assert_eq!(form.get(&"a".to_string()), &vec!("1".to_string()));
assert_eq!(form.get(&"foo bar".to_string()),
&vec!("abc".to_string(), "12 = 34".to_string()));
}
}<|fim▁end|>
|
#[deprecated="use `Path::parse`"]
pub fn path_from_str(s: &str) -> Result<Path, String> {
Path::parse(s)
|
<|file_name|>case4_air_quality_analysis9.py<|end_file_name|><|fim▁begin|>fig, ax = plt.subplots()
data['2012':].mean().plot(kind='bar', ax=ax, rot=0, color='C0')
ax.set_ylabel("NO$_2$ concentration (µg/m³)")
ax.axhline(y=40., color='darkorange')
ax.text(0.01, 0.48, 'Yearly limit is 40 µg/m³',
horizontalalignment='left', fontsize=13, <|fim▁hole|><|fim▁end|>
|
transform=ax.transAxes, color='darkorange');
|
<|file_name|>log.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# log.py - Copyright (C) 2015 Red Hat, Inc.
# Written by Ryan Barry <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA. A copy of the GNU General Public License is
# also available at http://www.gnu.org/copyleft/gpl.html.
import logging
import logging.config
"""
Logging for the oVirt Node Dbus Backend. Since we're running from
systemd, send default messages there and let journald handle it. Debug
goes in /tmp if we're running in debug mode.
"""
DEBUG_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s %(message)s'
}
},
'handlers': {
'console': {
'level': 'INFO',
'formatter': 'standard',
'class': 'logging.StreamHandler'
},
'debug': {
'level': 'DEBUG',
'formatter': 'standard',
'class': 'logging.handlers.WatchedFileHandler',
'filename': '/tmp/ovirt-node-dbus.debug.log'
}
},
'loggers': {
'': {
'handlers': ['console', 'debug'],
'level': 'DEBUG',
'propagate': False
}
}
}
LOGGING = DEBUG_LOGGING.copy()
LOGGING.update({
'handlers': {
'console': {
'level': 'INFO',
'formatter': 'standard',
'class': 'logging.StreamHandler'
}
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False
}
}<|fim▁hole|>def configure_logging(debug=False):
log_config = DEBUG_LOGGING if debug else LOGGING
logging.config.dictConfig(log_config)
def getLogger(name=None):
if not getLogger._logger:
if not logging.getLogger().handlers:
configure_logging()
getLogger._logger = logging.getLogger()
fullname = ".".join([getLogger._logger.name, name]) if name else name
return logging.getLogger(fullname)
getLogger._logger = None<|fim▁end|>
|
})
|
<|file_name|>test_is_thue_morse.py<|end_file_name|><|fim▁begin|>"""Test."""
import pytest
TM_TABLE = [
([0, 1, 1, 0, 1], True),
([0], True),
([1], False),
([0, 1, 0, 0], False),<|fim▁hole|>]
@pytest.mark.parametrize("n, result", TM_TABLE)
def test_is_thue_morse(n, result):
"""Test."""
from is_thue_morse import is_thue_morse
assert is_thue_morse(n) == result<|fim▁end|>
| |
<|file_name|>util.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010 Michael Buesch <[email protected]>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#include "util.h"
#include <time.h>
#include <errno.h>
#include <string.h>
#include <iostream>
void msleep(unsigned int msecs)
{
int err;
struct timespec time;
time.tv_sec = 0;<|fim▁hole|> }
time.tv_nsec = msecs;
time.tv_nsec *= 1000000;
do {
err = nanosleep(&time, &time);
} while (err && errno == EINTR);
if (err) {
std::cerr << "nanosleep() failed with: "
<< strerror(errno) << std::endl;
}
}<|fim▁end|>
|
while (msecs >= 1000) {
time.tv_sec++;
msecs -= 1000;
|
<|file_name|>SCORM13API.java<|end_file_name|><|fim▁begin|>package org.sakaiproject.scorm.ui.player.behaviors;
import org.adl.api.ecmascript.SCORM13APIInterface;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.scorm.model.api.ScoBean;
import org.sakaiproject.scorm.model.api.SessionBean;
import org.sakaiproject.scorm.navigation.INavigable;
import org.sakaiproject.scorm.navigation.INavigationEvent;
import org.sakaiproject.scorm.service.api.ScormApplicationService;
import org.sakaiproject.scorm.service.api.ScormSequencingService;
public abstract class SCORM13API implements SCORM13APIInterface {
private static Log log = LogFactory.getLog(SCORM13API.class);
// String value of FALSE for JavaScript returns.
protected static final String STRING_FALSE = "false";
// String value of TRUE for JavaScript returns.
protected static final String STRING_TRUE = "true";
public abstract SessionBean getSessionBean();
public abstract ScormApplicationService getApplicationService();
public abstract ScormSequencingService getSequencingService();
public abstract ScoBean getScoBean();
public abstract INavigable getAgent();
public abstract Object getTarget();
// Implementation of SCORM13APIInterface
public String Commit(String parameter) {
// TODO: Disable UI controls -- or throttle them on server -- don't mess with js
// Assume failure
String result = STRING_FALSE;
if (null == getSessionBean()) {
log.error("Null run state!");
}
if (getApplicationService().commit(parameter, getSessionBean(), getScoBean()))
result = STRING_TRUE;
// TODO: Enable UI controls
return result;
}
public String GetDiagnostic(String errorCode) {
return getApplicationService().getDiagnostic(errorCode, getSessionBean());
}
public String GetErrorString(String errorCode) {
return getApplicationService().getErrorString(errorCode, getSessionBean());
}
public String GetLastError() {
return getApplicationService().getLastError(getSessionBean());
}
public String GetValue(String parameter) {
return getApplicationService().getValue(parameter, getSessionBean(), getScoBean());
}
public String Initialize(String parameter) {
// Assume failure
String result = STRING_FALSE;
if (getApplicationService().initialize(parameter, getSessionBean(), getScoBean()))
result = STRING_TRUE;
return result;
}
public String SetValue(String dataModelElement, String value) {
// Assume failure
String result = STRING_FALSE;
if (getApplicationService().setValue(dataModelElement, value, getSessionBean(), getScoBean())) {
result = STRING_TRUE;
}
return result;
}
public String Terminate(String parameter) {
// Assume failure
String result = STRING_FALSE;
if (null == getSessionBean()) {
log.error("Null run state!");
return result;
}
INavigationEvent navigationEvent = getApplicationService().newNavigationEvent();
boolean isSuccessful = getApplicationService().terminate(parameter, navigationEvent,
getSessionBean(), getScoBean());<|fim▁hole|> if (navigationEvent.isChoiceEvent()) {
getSequencingService().navigate(navigationEvent.getChoiceEvent(), getSessionBean(), getAgent(), getTarget());
} else {
getSequencingService().navigate(navigationEvent.getEvent(), getSessionBean(), getAgent(), getTarget());
}
}
return result;
}
}<|fim▁end|>
|
if (isSuccessful) {
result = STRING_TRUE;
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian<|fim▁hole|>fn main() {
println!("cargo:rustc-flags=-l usp10");
}<|fim▁end|>
|
// Licensed under the MIT License <LICENSE.md>
|
<|file_name|>EqualElement.java<|end_file_name|><|fim▁begin|>/*
* lemonjuice - Java Template Engine.
* Copyright (C) 2009-2012 Manuel Tomis [email protected]
*
* This library is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library. If not, see <http://www.gnu.org/licenses/>.
*/<|fim▁hole|>
package com.codegremlins.lemonjuice.engine;
import com.codegremlins.lemonjuice.TemplateContext;
import com.codegremlins.lemonjuice.util.Functions;
class EqualElement extends Element {
private Element left;
private Element right;
private boolean condition;
public EqualElement(boolean condition, Element left, Element right) {
this.condition = condition;
this.left = left;
this.right = right;
}
@Override
public Object evaluate(TemplateContext model) throws Exception {
Object lvalue = left.evaluate(model);
Object rvalue = right.evaluate(model);
return condition == Functions.compareEqual(lvalue, rvalue);
}
}<|fim▁end|>
| |
<|file_name|>MapleTVEffect.java<|end_file_name|><|fim▁begin|>/*
This file is part of the OdinMS Maple Story Server
Copyright (C) 2008 Patrick Huy <[email protected]>
Matthias Butz <[email protected]>
Jan Christian Meyer <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation version 3 as published by
the Free Software Foundation. You may not use, modify or distribute
this program under any other version of the GNU Affero General Public
License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package server.maps;
import java.rmi.RemoteException;
import java.util.List;
import client.MapleCharacter;
import java.util.ArrayList;
import net.world.remote.WorldChannelInterface;
import server.TimerManager;
import tools.MaplePacketCreator;
/*
* MapleTVEffect
* @author MrXotic
*/
public class MapleTVEffect {
private List<String> message = new ArrayList<String>(5);
private MapleCharacter user;
private static boolean active;
private int type;
private MapleCharacter partner;
public MapleTVEffect(MapleCharacter user_, MapleCharacter partner_, List<String> msg, int type_) {
this.message = msg;
this.user = user_;
this.type = type_;
this.partner = partner_;
broadcastTV(true);
}
public static boolean isActive() {
return active;
}
private void setActive(boolean set) {
active = set;
}
private void broadcastTV(boolean active_) {
WorldChannelInterface wci = user.getClient().getChannelServer().getWorldInterface();<|fim▁hole|> try {
if (active_) {
wci.broadcastMessage(null, MaplePacketCreator.enableTV().getBytes());
wci.broadcastMessage(null, MaplePacketCreator.sendTV(user, message, type <= 2 ? type : type - 3, partner).getBytes());
int delay = 15000;
if (type == 4) {
delay = 30000;
} else if (type == 5) {
delay = 60000;
}
TimerManager.getInstance().schedule(new Runnable() {
@Override
public void run() {
broadcastTV(false);
}
}, delay);
} else {
wci.broadcastMessage(null, MaplePacketCreator.removeTV().getBytes());
}
} catch (RemoteException re) {
user.getClient().getChannelServer().reconnectWorld();
}
}
}<|fim▁end|>
|
setActive(active_);
|
<|file_name|>test_provider.py<|end_file_name|><|fim▁begin|>import numpy as np
from esdl.cube_provider import CubeSourceProvider
from esdl.cube_config import CubeConfig
class TestCubeSourceProvider(CubeSourceProvider):
"""
CubeSourceProvider implementation used for testing cube generation without any source files.
<|fim▁hole|>
:param cube_config: Specifies the fixed layout and conventions used for the cube.
:param name: The provider's registration name. Defaults to ``"test"``.
:param var: Name of a (float32) variable which will be filled with random numbers.
"""
def __init__(self, cube_config: CubeConfig, name: str = 'test', var: str = 'test'):
super(TestCubeSourceProvider, self).__init__(cube_config, name)
self._variable_name = var
self._value = 0.0
def prepare(self):
pass
@property
def temporal_coverage(self):
return self.cube_config.start_time, self.cube_config.end_time
@property
def spatial_coverage(self):
return 0, 0, self.cube_config.grid_width, self.cube_config.grid_height
@property
def variable_descriptors(self):
return {
self._variable_name: {
'data_type': np.float32,
'fill_value': np.nan,
'scale_factor': 1.0,
'add_offset': 0.0,
}
}
def compute_variable_images(self, period_start, period_end):
self._value += 0.1
image_width = self.cube_config.grid_width
image_height = self.cube_config.grid_height
image_shape = (image_height, image_width)
return {
self._variable_name: np.full(image_shape, self._value, dtype=np.float32)
}
def close(self):
pass<|fim▁end|>
|
The following usage generates a cube with two variables ``test_1`` and ``test_2``:
cube-gen -c ./myconf.py ./mycube test:var=test_1 test:var=test_2
|
<|file_name|>log.rs<|end_file_name|><|fim▁begin|>//! Logger implementation
use std::fs::File;
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf;
use std::collections::HashMap;
use std::sync::{Mutex, MutexGuard};
use std::borrow::Borrow;
use std;
/// Logger category. Logger can be configured to save
/// messages of each category to a separate file.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LoggerCategory {
Status,
Error,
DebugGeneral,
DebugMoveFiles,
DebugTemplateInstantiation,
DebugInheritance,
DebugParserSkips,
DebugParser,
DebugFfiSkips,
DebugSignals,
DebugAllocationPlace,
DebugRustSkips,
DebugQtDoc,
DebugQtDocDeclarations,
DebugQtHeaderNames,
}
pub use self::LoggerCategory::*;
/// Specifies where the logging messages should be sent.
#[derive(Debug)]
pub struct LoggerSettings {
/// Write messages to specified file path. If `None`,
/// logging to file is disabled.
pub file_path: Option<PathBuf>,
/// Write messages to stderr.
pub write_to_stderr: bool,
}
impl Default for LoggerSettings {
fn default() -> LoggerSettings {
LoggerSettings {
file_path: None,
write_to_stderr: true,
}
}
}
impl LoggerSettings {
/// Returns false if messages are ignored. This function
/// can be used to skip expensive construction of messages.
fn is_on(&self) -> bool {
self.write_to_stderr || self.file_path.is_some()
}
}
/// Logger object. One logger manages messages of all categories.
/// It's possible to use multiple loggers independently.
/// Use `default_logger()` to get global `Logger` instance.
/// Note that the instance is mutex-guarded.
#[derive(Default)]
pub struct Logger {<|fim▁hole|> files: HashMap<LoggerCategory, File>,
}
impl Logger {
/// Creates a new logger.
pub fn new() -> Logger {
Logger::default()
}
/// Set settings for all categories that don't have specific category settings.
pub fn set_default_settings(&mut self, value: LoggerSettings) {
self.default_settings = value;
self.files.clear();
}
/// Set settings for `category`.
pub fn set_category_settings(&mut self, category: LoggerCategory, value: LoggerSettings) {
self.category_settings.insert(category, value);
self.files.remove(&category);
}
/// Set all specific category settings. Old category settings are removed.
pub fn set_all_category_settings(&mut self, value: HashMap<LoggerCategory, LoggerSettings>) {
self.category_settings = value;
self.files.clear();
}
/// Returns false if messages of `category` are ignored. This function
/// can be used to skip expensive construction of messages.
pub fn is_on(&self, category: LoggerCategory) -> bool {
self.settings(category).is_on()
}
/// Lazy-log. If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(&mut self, category: LoggerCategory, f: F) {
let settings = if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
};
if !settings.is_on() {
return;
}
let text = f();
if settings.write_to_stderr {
std::io::stderr()
.write(text.borrow().as_bytes())
.unwrap();
std::io::stderr().write(b"\n").unwrap();
}
if let Some(ref path) = settings.file_path {
if !self.files.contains_key(&category) {
let file =
OpenOptions::new()
.write(true)
.create(true)
.append(true)
.open(path)
.unwrap_or_else(|err| panic!("failed to open log file '{}': {}", path.display(), err));
self.files.insert(category, file);
}
let mut file = self.files.get_mut(&category).unwrap();
file.write(text.borrow().as_bytes()).unwrap();
file.write(b"\n").unwrap();
}
}
/// Log a message `text` to `category`.
pub fn log<T: Borrow<str>>(&mut self, category: LoggerCategory, text: T) {
self.llog(category, move || text);
}
/// Returns settings for `category`.
fn settings(&self, category: LoggerCategory) -> &LoggerSettings {
if let Some(data) = self.category_settings.get(&category) {
data
} else {
&self.default_settings
}
}
}
lazy_static! {
pub static ref DEFAULT_LOGGER: Mutex<Logger> = Mutex::new(Logger::new());
}
/// Returns global instance of `Logger`.
///
/// The returned guard holds the lock until dropped; panics if the mutex was
/// poisoned by a panic in another thread.
pub fn default_logger() -> MutexGuard<'static, Logger> {
    DEFAULT_LOGGER.lock().unwrap()
}
/// Convenience function to log status messages to the default logger.
///
/// Locks the global logger for the duration of the call.
pub fn status<T: Borrow<str>>(text: T) {
    default_logger().log(LoggerCategory::Status, text);
}
/// Convenience function to log error messages to the default logger.
///
/// Locks the global logger for the duration of the call.
pub fn error<T: Borrow<str>>(text: T) {
    default_logger().log(LoggerCategory::Error, text);
}
/// Convenience function to log messages to the default logger and specified `category`.
///
/// Locks the global logger for the duration of the call.
pub fn log<T: Borrow<str>>(category: LoggerCategory, text: T) {
    default_logger().log(category, text);
}
/// Convenience function to lazy-log messages to the default logger and specified `category`.
/// If messages of `category` are not ignored, calls the passed closure
/// and uses its output value as a message in that category.
pub fn llog<T: Borrow<str>, F: FnOnce() -> T>(category: LoggerCategory, f: F) {
    default_logger().llog(category, f);
}
/// Convenience function to check if `category` is enabled in the default logger.
///
/// Useful for skipping expensive message construction when logging is off.
pub fn is_on(category: LoggerCategory) -> bool {
    default_logger().is_on(category)
}
|
default_settings: LoggerSettings,
category_settings: HashMap<LoggerCategory, LoggerSettings>,
|
// SairaDB - A distributed database
// Copyright (C) 2015 by Siyu Wang
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
use std::net::TcpStream;
use std::thread;
use std::sync::Arc;
use std::sync::mpsc::Sender;
use std::sync::atomic::AtomicUsize;
use super::libc;
/// Spawn one background task per master node.
///
/// `map` must contain the "cookie-master" and "master-port" configuration
/// keys; each spawned thread receives its own copies of the shared data.
pub fn init(masters: Vec<String>, vnodes: Vec<u64>,
            map: &HashMap<String, String>, log_sender: Sender<String>) {
    let shared_vnodes = Arc::new(vnodes);
    let cookie = map.get("cookie-master").unwrap().to_string();
    let port = map.get("master-port").unwrap().to_string();

    for master in masters {
        // Per-thread copies so each task owns its data.
        let task_vnodes = shared_vnodes.clone();
        let task_sender = log_sender.clone();
        let task_cookie = cookie.clone();
        let task_port = port.clone();
        let _ = thread::Builder::new()
            .name(format!("master_task({})", master))
            .spawn(move || {
                master_task(master, task_port, task_vnodes, task_cookie, task_sender);
            });
    }
}
/// Per-master connection loop (work in progress).
///
/// NOTE(review): this currently busy-loops on `TcpStream::connect` with no
/// delay or back-off, and `stream`, `count`, `vnodes`, `cookie` and
/// `log_sender` are never used — presumably the handshake (sending `cookie`,
/// see the commented line) is still to be implemented. Confirm before use.
fn master_task(ip: String, port: String, vnodes: Arc<Vec<u64>>, cookie: String,
               log_sender: Sender<String>) {
    let addr: &str = &(ip + ":" + &port);
    // Connection-attempt counter; not read anywhere yet.
    let count = Arc::new(AtomicUsize::new(0));
    loop {
        // Result is currently unused; the write below is commented out.
        let stream = TcpStream::connect(addr);
        //match stream.write_all(cookie.as_bytes());
    }
}
/// Placeholder for per-connection handling logic; not yet implemented.
fn master_connection() {
}
|
use std::collections::HashMap;
|
<|file_name|>blockDoc.rs<|end_file_name|><|fim▁begin|>/*! # Iterator
*
* The heart and soul of this module is the [`Iterator`] trait. The core of<|fim▁hole|> * trait Iterator {
* type Item;
* fn next(&mut self) -> Option<Self::Item>;
* }
* ```
*
* An iterator has a method, [`next()`], which when called, returns an
* [`Option`]`<Item>`. [`next()`] will return `Some(Item)` as long as there
* are elements, and once they've all been exhausted, will return `None` to
* indicate that iteration is finished. Individual iterators may choose to
* resume iteration, and so calling [`next()`] again may or may not eventually
* start returning `Some(Item)` again at some point.
*
* [`Iterator`]'s full definition includes a number of other methods as well,
* but they are default methods, built on top of [`next()`], and so you get
* them for free.
*
* Iterators are also composable, and it's common to chain them together to do
* more complex forms of processing. See the [Adapters](#adapters) section
* below for more details.
*
* [`Iterator`]: trait.Iterator.html
* [`next()`]: trait.Iterator.html#tymethod.next
* [`Option`]: ../../std/option/enum.Option.html
*/
/**The `Option` type. See [the module level documentation](index.html) for more.*/
/** Lorem ipsum dolor sit amet, consectetur adipiscing elit,
* sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
* Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris
* nisi ut aliquip ex ea commodo consequat.
*/
/**
missing asterisk
*/
/**
```
missing asterisk
```
*/<|fim▁end|>
|
* [`Iterator`] looks like this:
*
* ```
|
import sys
import gc
import pygame
from pygame.locals import *
from input import *
import snd
TICKS_PER_SECOND = 25
GAMETICKS = 1000 / TICKS_PER_SECOND
def set_game_speed( slowdown ):
    """Rescale the tick rate: a slowdown of 1.0 keeps the default 25 ticks/sec."""
    global TICKS_PER_SECOND, GAMETICKS
    TICKS_PER_SECOND = int( 25 * slowdown )
    GAMETICKS = 1000 / TICKS_PER_SECOND
class Game:
    """Fixed-timestep pygame main loop.

    Runs game logic at TICKS_PER_SECOND and renders as fast as possible,
    passing an interpolation factor to ``render``.

    NOTE(review): subclasses must supply ``do_tick(userinput)``,
    ``render(surface, interpol, time_sec)`` and a ``state`` object (used by
    ``handle_events``); none of these are defined here -- confirm the
    subclass contract.
    """
    def __init__( self, name, configuration ):
        # `configuration` is expected to expose `is_fullscreen` and
        # `resolution` (see init_pygame) -- TODO confirm against callers.
        self.config = configuration
        self.name = name
    def init_pygame( self ):
        """Initialize sound, display, window caption and input grabbing."""
        snd.pre_init()
        # Init the display
        pygame.init()
        self.userinput = UserInput()
        if not self.config.is_fullscreen:
            pygame.display.set_mode( self.config.resolution )
        else:
            pygame.display.set_mode( self.config.resolution, pygame.FULLSCREEN )
        pygame.display.set_caption( self.name )
        # Init the input
        pygame.mouse.set_visible( False )
        pygame.event.set_grab( False )
        snd.init()
    def deinit_pygame( self ):
        """Shut down sound and pygame."""
        snd.deinit()
        pygame.quit()
    def before_gameloop( self ):
        # Hook for subclasses; called once before the main loop starts.
        pass
    def after_gameloop( self ):
        # Hook for subclasses; called once after the main loop finishes.
        pass
    def run( self ):
        """Run the full game: init, main loop until `game_is_done`, cleanup."""
        try:
            self.init_pygame()
            self.before_gameloop()
            self.fps = 0
            frame_count = 0
            next_game_tick = pygame.time.get_ticks()
            next_half_second = pygame.time.get_ticks()
            # main loop
            self.game_is_done = False
            while not self.game_is_done:
                # events
                self.handle_events()
                # game tick: catch up on pending ticks, but at most 4 per frame
                loop_count = 0
                while pygame.time.get_ticks() > next_game_tick and loop_count < 4:
                    x, y = pygame.mouse.get_pos()
                    self.userinput.mouse.feed_pos( Vec2D(x, y) )
                    self.do_tick( self.userinput )
                    self.userinput.update()
                    next_game_tick += GAMETICKS
                    loop_count += 1
##                gc.collect()
                if loop_count >= 4: # don't overdo the ticks
                    next_game_tick = pygame.time.get_ticks()
                # render; interpol = fraction of the current tick already
                # elapsed, for interpolated drawing between ticks
                time_sec = pygame.time.get_ticks() * 0.001
                interpol = 1 - ((next_game_tick - pygame.time.get_ticks()) / float(GAMETICKS))
                self.render(pygame.display.get_surface(), interpol, time_sec )
                pygame.display.flip()
                frame_count += 1
                # refresh the FPS estimate twice a second
                if pygame.time.get_ticks() > next_half_second:
                    self.fps = 2 * frame_count
                    frame_count = 0
                    next_half_second += 500
            self.after_gameloop()
            self.deinit_pygame()
        except:
            # Always restore the display/sound state, then re-raise.
            self.deinit_pygame()
            print "Unexpected error:", sys.exc_info()[0]
            raise
    def handle_events( self ):
        """Drain the pygame event queue into the UserInput state."""
        for event in pygame.event.get():
            if event.type == QUIT:
                self.game_is_done = True
            elif event.type == KEYDOWN:
                self.userinput.key.feed_down( event.key )
                self.userinput.key.feed_char( event.unicode )
            elif event.type == KEYUP:
                self.userinput.key.feed_up( event.key )
            elif event.type == MOUSEBUTTONDOWN:
                self.userinput.mouse.feed_down( event.button )
                # NOTE(review): `self.state` is never assigned in this class;
                # presumably provided by a subclass -- verify.
                self.state.mouse_down( event.button )
            elif event.type == MOUSEBUTTONUP:
                self.userinput.mouse.feed_up( event.button )
            elif event.type == JOYBUTTONDOWN:
                self.userinput.joys[event.joy].feed_down( event.button )
            elif event.type == JOYBUTTONUP:
                self.userinput.joys[event.joy].feed_up( event.button )
    def draw_fps( self, surface ):
        """Blit the current FPS estimate near the top-left corner of `surface`."""
        font = pygame.font.Font( None, 20 )
        render_text = font.render( str(self.fps), 0, (255,255,255) )
        surface.blit( render_text, (10,10) )
| |
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group as PermissionGroup
from django.forms.models import modelform_factory
from django.http.response import HttpResponseRedirect
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import UpdateView
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.toolbar import get_default_edit_toolbar
from shuup.admin.utils.urls import get_model_url
class PermissionChangeFormBase(forms.ModelForm):
    """Base form for editing a user's permission flags and permission groups.

    Combined with a concrete user model through ``modelform_factory`` (see
    ``UserChangePermissionsView.get_form_class``). ``changing_user`` is the
    admin performing the change; it determines which fields are shown and
    prevents users from demoting themselves.
    """
    old_password = forms.CharField(
        label=_("Your Password"),
        widget=forms.PasswordInput,
        help_text=_("For security purposes, we need your current password.")
    )

    def __init__(self, changing_user, *args, **kwargs):
        super(PermissionChangeFormBase, self).__init__(*args, **kwargs)
        self.changing_user = changing_user
        # A non-superuser may not edit a superuser's "is_superuser" flag.
        if getattr(self.instance, 'is_superuser', False) and not getattr(self.changing_user, 'is_superuser', False):
            self.fields.pop("is_superuser")
        if not (
            self.changing_user == self.instance or
            getattr(self.instance, 'is_superuser', False)
        ):
            # Only require old password when editing
            self.fields.pop("old_password")
        initial_groups = self._get_initial_groups()
        permission_groups_field = Select2MultipleField(
            model=PermissionGroup,
            initial=[group.pk for group in initial_groups],
            required=False,
            label=_("Permission Groups"),
            help_text=_(
                "The permission groups that this user belongs to. "
                "Permission groups are configured through Contacts - Permission Groups."
            )
        )
        permission_groups_field.widget.choices = [(group.pk, force_text(group)) for group in initial_groups]
        self.fields["permission_groups"] = permission_groups_field

    def _get_initial_groups(self):
        # Unsaved users, or swapped user models without a `groups` relation,
        # have no groups yet.
        if self.instance.pk and hasattr(self.instance, "groups"):
            return self.instance.groups.all()
        else:
            return []

    def clean_old_password(self):
        """
        Validates that the old_password field is correct.
        """
        old_password = self.cleaned_data["old_password"]
        if not self.changing_user.check_password(old_password):
            raise forms.ValidationError(
                _("Your old password was entered incorrectly. Please enter it again."),
                code='password_incorrect',
            )
        return old_password

    def clean_members(self):
        members = self.cleaned_data.get("members", [])
        return get_user_model().objects.filter(pk__in=members).all()

    def clean_permission_groups(self):
        permission_groups = self.cleaned_data.get("permission_groups", [])
        return PermissionGroup.objects.filter(pk__in=permission_groups)

    def clean(self):
        for field in ("is_staff", "is_superuser"):
            # "is_superuser" may have been removed in __init__, in which case
            # it is absent from cleaned_data; skip it instead of raising
            # KeyError.
            if field not in self.cleaned_data:
                continue
            flag = self.cleaned_data[field]
            if self.changing_user == self.instance and not flag:
                self.add_error(field, _("You can't unset this status for yourself."))
        return self.cleaned_data

    def save(self):
        """Save the user, replace their permission groups, and return the
        saved instance (matching the ``ModelForm.save`` contract)."""
        obj = super(PermissionChangeFormBase, self).save()
        obj.groups.clear()
        obj.groups = self.cleaned_data["permission_groups"]
        return obj
class UserChangePermissionsView(UpdateView):
    """Admin view for editing a user's permission flags and permission groups."""
    template_name = "shuup/admin/users/change_permissions.jinja"
    model = settings.AUTH_USER_MODEL
    title = _("Change User Permissions")

    def get_form_class(self):
        # Build the concrete form from the base class, limited to the flag
        # fields; "permission_groups" is added dynamically in the form's
        # __init__.
        return modelform_factory(
            model=get_user_model(),
            form=PermissionChangeFormBase,
            fields=("is_staff", "is_superuser")
        )

    def get_queryset(self):
        return get_user_model().objects.all()

    def get_toolbar(self):
        toolbar = get_default_edit_toolbar(
            self,
            "permissions_form",
            discard_url=get_model_url(self.object),
            with_split_save=False
        )
        return toolbar

    def get_form_kwargs(self):
        # Pass the acting admin to the form so it can tailor its fields.
        kwargs = super(UserChangePermissionsView, self).get_form_kwargs()
        kwargs["changing_user"] = self.request.user
        return kwargs

    def get_context_data(self, **kwargs):
        context = super(UserChangePermissionsView, self).get_context_data(**kwargs)
        context["toolbar"] = self.get_toolbar()
        context["title"] = _("Change Permissions: %s") % self.object
        return context

    def form_valid(self, form):
        form.save()
        messages.success(self.request, _("Permissions changed for %s.") % self.object)
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        return get_model_url(self.object)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.